expmed.c (emit_store_flag): Extract updated comparison code from the return value...
[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
51
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
54
55 #ifdef PUSH_ROUNDING
56
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first. */
59 #endif
60
61 #endif
62
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
70
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
74 #endif
75
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
82 int cse_not_expected;
83
84 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
85 static tree placeholder_list = 0;
86
87 /* This structure is used by move_by_pieces to describe the move to
88 be performed. */
89 struct move_by_pieces
90 {
91 rtx to;
92 rtx to_addr;
93 int autinc_to;
94 int explicit_inc_to;
95 rtx from;
96 rtx from_addr;
97 int autinc_from;
98 int explicit_inc_from;
99 unsigned HOST_WIDE_INT len;
100 HOST_WIDE_INT offset;
101 int reverse;
102 };
103
104 /* This structure is used by store_by_pieces to describe the clear to
105 be performed. */
106
107 struct store_by_pieces
108 {
109 rtx to;
110 rtx to_addr;
111 int autinc_to;
112 int explicit_inc_to;
113 unsigned HOST_WIDE_INT len;
114 HOST_WIDE_INT offset;
115 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
116 PTR constfundata;
117 int reverse;
118 };
119
120 extern struct obstack permanent_obstack;
121
122 static rtx enqueue_insn PARAMS ((rtx, rtx));
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
124 PARAMS ((unsigned HOST_WIDE_INT,
125 unsigned int));
126 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *));
128 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
129 enum machine_mode));
130 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
131 unsigned int));
132 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
133 unsigned int));
134 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
135 enum machine_mode,
136 struct store_by_pieces *));
137 static rtx get_subtarget PARAMS ((rtx));
138 static int is_zeros_p PARAMS ((tree));
139 static int mostly_zeros_p PARAMS ((tree));
140 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int));
143 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
144 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
145 HOST_WIDE_INT, enum machine_mode,
146 tree, enum machine_mode, int, tree,
147 int));
148 static rtx var_rtx PARAMS ((tree));
149 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
150 static rtx expand_increment PARAMS ((tree, int, int));
151 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
152 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
153 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
154 rtx, rtx));
155 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
158 #endif
159 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
160
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
164
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
167
168 /* If a memory-to-memory move would take MOVE_RATIO or more simple
169 move-instruction sequences, we will do a movstr or libcall instead. */
170
171 #ifndef MOVE_RATIO
172 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
173 #define MOVE_RATIO 2
174 #else
175 /* If we are optimizing for space (-Os), cut down the default move ratio. */
176 #define MOVE_RATIO (optimize_size ? 3 : 15)
177 #endif
178 #endif
179
180 /* This macro is used to determine whether move_by_pieces should be called
181 to perform a structure copy. */
182 #ifndef MOVE_BY_PIECES_P
183 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
185 #endif
186
187 /* This array records the insn_code of insns to perform block moves. */
188 enum insn_code movstr_optab[NUM_MACHINE_MODES];
189
190 /* This array records the insn_code of insns to perform block clears. */
191 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
192
193 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
194
195 #ifndef SLOW_UNALIGNED_ACCESS
196 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
197 #endif
198 \f
199 /* This is run once per compilation to set up which modes can be used
200 directly in memory and to initialize the block move optab. */
201
202 void
203 init_expr_once ()
204 {
205 rtx insn, pat;
206 enum machine_mode mode;
207 int num_clobbers;
208 rtx mem, mem1;
209
210 start_sequence ();
211
212 /* Try indexing by frame ptr and try by stack ptr.
213 It is known that on the Convex the stack ptr isn't a valid index.
214 With luck, one or the other is valid on any machine. */
215 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
216 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
217
218 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
219 pat = PATTERN (insn);
220
221 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
222 mode = (enum machine_mode) ((int) mode + 1))
223 {
224 int regno;
225 rtx reg;
226
227 direct_load[(int) mode] = direct_store[(int) mode] = 0;
228 PUT_MODE (mem, mode);
229 PUT_MODE (mem1, mode);
230
231 /* See if there is some register that can be used in this mode and
232 directly loaded or stored from memory. */
233
234 if (mode != VOIDmode && mode != BLKmode)
235 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
236 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
237 regno++)
238 {
239 if (! HARD_REGNO_MODE_OK (regno, mode))
240 continue;
241
242 reg = gen_rtx_REG (mode, regno);
243
244 SET_SRC (pat) = mem;
245 SET_DEST (pat) = reg;
246 if (recog (pat, insn, &num_clobbers) >= 0)
247 direct_load[(int) mode] = 1;
248
249 SET_SRC (pat) = mem1;
250 SET_DEST (pat) = reg;
251 if (recog (pat, insn, &num_clobbers) >= 0)
252 direct_load[(int) mode] = 1;
253
254 SET_SRC (pat) = reg;
255 SET_DEST (pat) = mem;
256 if (recog (pat, insn, &num_clobbers) >= 0)
257 direct_store[(int) mode] = 1;
258
259 SET_SRC (pat) = reg;
260 SET_DEST (pat) = mem1;
261 if (recog (pat, insn, &num_clobbers) >= 0)
262 direct_store[(int) mode] = 1;
263 }
264 }
265
266 end_sequence ();
267 }
268
269 /* This is run at the start of compiling a function. */
270
271 void
272 init_expr ()
273 {
274 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
275
276 pending_chain = 0;
277 pending_stack_adjust = 0;
278 stack_pointer_delta = 0;
279 inhibit_defer_pop = 0;
280 saveregs_value = 0;
281 apply_args_value = 0;
282 forced_labels = 0;
283 }
284
285 void
286 mark_expr_status (p)
287 struct expr_status *p;
288 {
289 if (p == NULL)
290 return;
291
292 ggc_mark_rtx (p->x_saveregs_value);
293 ggc_mark_rtx (p->x_apply_args_value);
294 ggc_mark_rtx (p->x_forced_labels);
295 }
296
297 void
298 free_expr_status (f)
299 struct function *f;
300 {
301 free (f->expr);
302 f->expr = NULL;
303 }
304
305 /* Small sanity check that the queue is empty at the end of a function. */
306
307 void
308 finish_expr_for_function ()
309 {
310 if (pending_chain)
311 abort ();
312 }
313 \f
314 /* Manage the queue of increment instructions to be output
315 for POSTINCREMENT_EXPR expressions, etc. */
316
317 /* Queue up to increment (or change) VAR later. BODY says how:
318 BODY should be the same thing you would pass to emit_insn
319 to increment right away. It will go to emit_insn later on.
320
321 The value is a QUEUED expression to be used in place of VAR
322 where you want to guarantee the pre-incrementation value of VAR. */
323
324 static rtx
325 enqueue_insn (var, body)
326 rtx var, body;
327 {
328 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
329 body, pending_chain);
330 return pending_chain;
331 }
332
333 /* Use protect_from_queue to convert a QUEUED expression
334 into something that you can put immediately into an instruction.
335 If the queued incrementation has not happened yet,
336 protect_from_queue returns the variable itself.
337 If the incrementation has happened, protect_from_queue returns a temp
338 that contains a copy of the old value of the variable.
339
340 Any time an rtx which might possibly be a QUEUED is to be put
341 into an instruction, it must be passed through protect_from_queue first.
342 QUEUED expressions are not meaningful in instructions.
343
344 Do not pass a value through protect_from_queue and then hold
345 on to it for a while before putting it in an instruction!
346 If the queue is flushed in between, incorrect code will result. */
347
348 rtx
349 protect_from_queue (x, modify)
350 rtx x;
351 int modify;
352 {
353 RTX_CODE code = GET_CODE (x);
354
355 #if 0 /* A QUEUED can hang around after the queue is forced out. */
356 /* Shortcut for most common case. */
357 if (pending_chain == 0)
358 return x;
359 #endif
360
361 if (code != QUEUED)
362 {
363 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
364 use of autoincrement. Make a copy of the contents of the memory
365 location rather than a copy of the address, but not if the value is
366 of mode BLKmode. Don't modify X in place since it might be
367 shared. */
368 if (code == MEM && GET_MODE (x) != BLKmode
369 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
370 {
371 rtx y = XEXP (x, 0);
372 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
373
374 if (QUEUED_INSN (y))
375 {
376 rtx temp = gen_reg_rtx (GET_MODE (x));
377
378 emit_insn_before (gen_move_insn (temp, new),
379 QUEUED_INSN (y));
380 return temp;
381 }
382
383 /* Copy the address into a pseudo, so that the returned value
384 remains correct across calls to emit_queue. */
385 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
386 }
387
388 /* Otherwise, recursively protect the subexpressions of all
389 the kinds of rtx's that can contain a QUEUED. */
390 if (code == MEM)
391 {
392 rtx tem = protect_from_queue (XEXP (x, 0), 0);
393 if (tem != XEXP (x, 0))
394 {
395 x = copy_rtx (x);
396 XEXP (x, 0) = tem;
397 }
398 }
399 else if (code == PLUS || code == MULT)
400 {
401 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
402 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
403 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
404 {
405 x = copy_rtx (x);
406 XEXP (x, 0) = new0;
407 XEXP (x, 1) = new1;
408 }
409 }
410 return x;
411 }
412 /* If the increment has not happened, use the variable itself. Copy it
413 into a new pseudo so that the value remains correct across calls to
414 emit_queue. */
415 if (QUEUED_INSN (x) == 0)
416 return copy_to_reg (QUEUED_VAR (x));
417 /* If the increment has happened and a pre-increment copy exists,
418 use that copy. */
419 if (QUEUED_COPY (x) != 0)
420 return QUEUED_COPY (x);
421 /* The increment has happened but we haven't set up a pre-increment copy.
422 Set one up now, and use it. */
423 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
424 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
425 QUEUED_INSN (x));
426 return QUEUED_COPY (x);
427 }
428
429 /* Return nonzero if X contains a QUEUED expression:
430 if it contains anything that will be altered by a queued increment.
431 We handle only combinations of MEM, PLUS, MINUS and MULT operators
432 since memory addresses generally contain only those. */
433
434 int
435 queued_subexp_p (x)
436 rtx x;
437 {
438 enum rtx_code code = GET_CODE (x);
439 switch (code)
440 {
441 case QUEUED:
442 return 1;
443 case MEM:
444 return queued_subexp_p (XEXP (x, 0));
445 case MULT:
446 case PLUS:
447 case MINUS:
448 return (queued_subexp_p (XEXP (x, 0))
449 || queued_subexp_p (XEXP (x, 1)));
450 default:
451 return 0;
452 }
453 }
454
455 /* Perform all the pending incrementations. */
456
457 void
458 emit_queue ()
459 {
460 rtx p;
461 while ((p = pending_chain))
462 {
463 rtx body = QUEUED_BODY (p);
464
465 if (GET_CODE (body) == SEQUENCE)
466 {
467 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
468 emit_insn (QUEUED_BODY (p));
469 }
470 else
471 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
472 pending_chain = QUEUED_NEXT (p);
473 }
474 }
475 \f
476 /* Copy data from FROM to TO, where the machine modes are not the same.
477 Both modes may be integer, or both may be floating.
478 UNSIGNEDP should be nonzero if FROM is an unsigned type.
479 This causes zero-extension instead of sign-extension. */
480
481 void
482 convert_move (to, from, unsignedp)
483 rtx to, from;
484 int unsignedp;
485 {
486 enum machine_mode to_mode = GET_MODE (to);
487 enum machine_mode from_mode = GET_MODE (from);
488 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
489 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
490 enum insn_code code;
491 rtx libcall;
492
493 /* rtx code for making an equivalent value. */
494 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
495
496 to = protect_from_queue (to, 1);
497 from = protect_from_queue (from, 0);
498
499 if (to_real != from_real)
500 abort ();
501
502 /* If FROM is a SUBREG that indicates that we have already done at least
503 the required extension, strip it. We don't handle such SUBREGs as
504 TO here. */
505
506 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
507 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
508 >= GET_MODE_SIZE (to_mode))
509 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
510 from = gen_lowpart (to_mode, from), from_mode = to_mode;
511
512 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
513 abort ();
514
515 if (to_mode == from_mode
516 || (from_mode == VOIDmode && CONSTANT_P (from)))
517 {
518 emit_move_insn (to, from);
519 return;
520 }
521
522 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
523 {
524 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
525 abort ();
526
527 if (VECTOR_MODE_P (to_mode))
528 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
529 else
530 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
531
532 emit_move_insn (to, from);
533 return;
534 }
535
536 if (to_real != from_real)
537 abort ();
538
539 if (to_real)
540 {
541 rtx value, insns;
542
543 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
544 {
545 /* Try converting directly if the insn is supported. */
546 if ((code = can_extend_p (to_mode, from_mode, 0))
547 != CODE_FOR_nothing)
548 {
549 emit_unop_insn (code, to, from, UNKNOWN);
550 return;
551 }
552 }
553
554 #ifdef HAVE_trunchfqf2
555 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
556 {
557 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
558 return;
559 }
560 #endif
561 #ifdef HAVE_trunctqfqf2
562 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
563 {
564 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
565 return;
566 }
567 #endif
568 #ifdef HAVE_truncsfqf2
569 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
570 {
571 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
572 return;
573 }
574 #endif
575 #ifdef HAVE_truncdfqf2
576 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
577 {
578 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
579 return;
580 }
581 #endif
582 #ifdef HAVE_truncxfqf2
583 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
584 {
585 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
586 return;
587 }
588 #endif
589 #ifdef HAVE_trunctfqf2
590 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
591 {
592 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
593 return;
594 }
595 #endif
596
597 #ifdef HAVE_trunctqfhf2
598 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
599 {
600 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
601 return;
602 }
603 #endif
604 #ifdef HAVE_truncsfhf2
605 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
606 {
607 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
608 return;
609 }
610 #endif
611 #ifdef HAVE_truncdfhf2
612 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
613 {
614 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
615 return;
616 }
617 #endif
618 #ifdef HAVE_truncxfhf2
619 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
620 {
621 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
622 return;
623 }
624 #endif
625 #ifdef HAVE_trunctfhf2
626 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
627 {
628 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
629 return;
630 }
631 #endif
632
633 #ifdef HAVE_truncsftqf2
634 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
637 return;
638 }
639 #endif
640 #ifdef HAVE_truncdftqf2
641 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
644 return;
645 }
646 #endif
647 #ifdef HAVE_truncxftqf2
648 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
649 {
650 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
651 return;
652 }
653 #endif
654 #ifdef HAVE_trunctftqf2
655 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
656 {
657 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
658 return;
659 }
660 #endif
661
662 #ifdef HAVE_truncdfsf2
663 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
664 {
665 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
666 return;
667 }
668 #endif
669 #ifdef HAVE_truncxfsf2
670 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
671 {
672 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
673 return;
674 }
675 #endif
676 #ifdef HAVE_trunctfsf2
677 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
678 {
679 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
680 return;
681 }
682 #endif
683 #ifdef HAVE_truncxfdf2
684 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
685 {
686 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
687 return;
688 }
689 #endif
690 #ifdef HAVE_trunctfdf2
691 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
692 {
693 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
694 return;
695 }
696 #endif
697
698 libcall = (rtx) 0;
699 switch (from_mode)
700 {
701 case SFmode:
702 switch (to_mode)
703 {
704 case DFmode:
705 libcall = extendsfdf2_libfunc;
706 break;
707
708 case XFmode:
709 libcall = extendsfxf2_libfunc;
710 break;
711
712 case TFmode:
713 libcall = extendsftf2_libfunc;
714 break;
715
716 default:
717 break;
718 }
719 break;
720
721 case DFmode:
722 switch (to_mode)
723 {
724 case SFmode:
725 libcall = truncdfsf2_libfunc;
726 break;
727
728 case XFmode:
729 libcall = extenddfxf2_libfunc;
730 break;
731
732 case TFmode:
733 libcall = extenddftf2_libfunc;
734 break;
735
736 default:
737 break;
738 }
739 break;
740
741 case XFmode:
742 switch (to_mode)
743 {
744 case SFmode:
745 libcall = truncxfsf2_libfunc;
746 break;
747
748 case DFmode:
749 libcall = truncxfdf2_libfunc;
750 break;
751
752 default:
753 break;
754 }
755 break;
756
757 case TFmode:
758 switch (to_mode)
759 {
760 case SFmode:
761 libcall = trunctfsf2_libfunc;
762 break;
763
764 case DFmode:
765 libcall = trunctfdf2_libfunc;
766 break;
767
768 default:
769 break;
770 }
771 break;
772
773 default:
774 break;
775 }
776
777 if (libcall == (rtx) 0)
778 /* This conversion is not implemented yet. */
779 abort ();
780
781 start_sequence ();
782 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
783 1, from, from_mode);
784 insns = get_insns ();
785 end_sequence ();
786 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
787 from));
788 return;
789 }
790
791 /* Now both modes are integers. */
792
793 /* Handle expanding beyond a word. */
794 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
795 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
796 {
797 rtx insns;
798 rtx lowpart;
799 rtx fill_value;
800 rtx lowfrom;
801 int i;
802 enum machine_mode lowpart_mode;
803 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
804
805 /* Try converting directly if the insn is supported. */
806 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
807 != CODE_FOR_nothing)
808 {
809 /* If FROM is a SUBREG, put it into a register. Do this
810 so that we always generate the same set of insns for
811 better cse'ing; if an intermediate assignment occurred,
812 we won't be doing the operation directly on the SUBREG. */
813 if (optimize > 0 && GET_CODE (from) == SUBREG)
814 from = force_reg (from_mode, from);
815 emit_unop_insn (code, to, from, equiv_code);
816 return;
817 }
818 /* Next, try converting via full word. */
819 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
820 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
821 != CODE_FOR_nothing))
822 {
823 if (GET_CODE (to) == REG)
824 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
825 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
826 emit_unop_insn (code, to,
827 gen_lowpart (word_mode, to), equiv_code);
828 return;
829 }
830
831 /* No special multiword conversion insn; do it by hand. */
832 start_sequence ();
833
834 /* Since we will turn this into a no conflict block, we must ensure
835 that the source does not overlap the target. */
836
837 if (reg_overlap_mentioned_p (to, from))
838 from = force_reg (from_mode, from);
839
840 /* Get a copy of FROM widened to a word, if necessary. */
841 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
842 lowpart_mode = word_mode;
843 else
844 lowpart_mode = from_mode;
845
846 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
847
848 lowpart = gen_lowpart (lowpart_mode, to);
849 emit_move_insn (lowpart, lowfrom);
850
851 /* Compute the value to put in each remaining word. */
852 if (unsignedp)
853 fill_value = const0_rtx;
854 else
855 {
856 #ifdef HAVE_slt
857 if (HAVE_slt
858 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
859 && STORE_FLAG_VALUE == -1)
860 {
861 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
862 lowpart_mode, 0);
863 fill_value = gen_reg_rtx (word_mode);
864 emit_insn (gen_slt (fill_value));
865 }
866 else
867 #endif
868 {
869 fill_value
870 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
871 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
872 NULL_RTX, 0);
873 fill_value = convert_to_mode (word_mode, fill_value, 1);
874 }
875 }
876
877 /* Fill the remaining words. */
878 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
879 {
880 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
881 rtx subword = operand_subword (to, index, 1, to_mode);
882
883 if (subword == 0)
884 abort ();
885
886 if (fill_value != subword)
887 emit_move_insn (subword, fill_value);
888 }
889
890 insns = get_insns ();
891 end_sequence ();
892
893 emit_no_conflict_block (insns, to, from, NULL_RTX,
894 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
895 return;
896 }
897
898 /* Truncating multi-word to a word or less. */
899 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
900 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
901 {
902 if (!((GET_CODE (from) == MEM
903 && ! MEM_VOLATILE_P (from)
904 && direct_load[(int) to_mode]
905 && ! mode_dependent_address_p (XEXP (from, 0)))
906 || GET_CODE (from) == REG
907 || GET_CODE (from) == SUBREG))
908 from = force_reg (from_mode, from);
909 convert_move (to, gen_lowpart (word_mode, from), 0);
910 return;
911 }
912
913 /* Handle pointer conversion. */ /* SPEE 900220. */
914 if (to_mode == PQImode)
915 {
916 if (from_mode != QImode)
917 from = convert_to_mode (QImode, from, unsignedp);
918
919 #ifdef HAVE_truncqipqi2
920 if (HAVE_truncqipqi2)
921 {
922 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
923 return;
924 }
925 #endif /* HAVE_truncqipqi2 */
926 abort ();
927 }
928
929 if (from_mode == PQImode)
930 {
931 if (to_mode != QImode)
932 {
933 from = convert_to_mode (QImode, from, unsignedp);
934 from_mode = QImode;
935 }
936 else
937 {
938 #ifdef HAVE_extendpqiqi2
939 if (HAVE_extendpqiqi2)
940 {
941 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
942 return;
943 }
944 #endif /* HAVE_extendpqiqi2 */
945 abort ();
946 }
947 }
948
949 if (to_mode == PSImode)
950 {
951 if (from_mode != SImode)
952 from = convert_to_mode (SImode, from, unsignedp);
953
954 #ifdef HAVE_truncsipsi2
955 if (HAVE_truncsipsi2)
956 {
957 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
958 return;
959 }
960 #endif /* HAVE_truncsipsi2 */
961 abort ();
962 }
963
964 if (from_mode == PSImode)
965 {
966 if (to_mode != SImode)
967 {
968 from = convert_to_mode (SImode, from, unsignedp);
969 from_mode = SImode;
970 }
971 else
972 {
973 #ifdef HAVE_extendpsisi2
974 if (! unsignedp && HAVE_extendpsisi2)
975 {
976 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
977 return;
978 }
979 #endif /* HAVE_extendpsisi2 */
980 #ifdef HAVE_zero_extendpsisi2
981 if (unsignedp && HAVE_zero_extendpsisi2)
982 {
983 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
984 return;
985 }
986 #endif /* HAVE_zero_extendpsisi2 */
987 abort ();
988 }
989 }
990
991 if (to_mode == PDImode)
992 {
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
995
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
998 {
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1000 return;
1001 }
1002 #endif /* HAVE_truncdipdi2 */
1003 abort ();
1004 }
1005
1006 if (from_mode == PDImode)
1007 {
1008 if (to_mode != DImode)
1009 {
1010 from = convert_to_mode (DImode, from, unsignedp);
1011 from_mode = DImode;
1012 }
1013 else
1014 {
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1017 {
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1019 return;
1020 }
1021 #endif /* HAVE_extendpdidi2 */
1022 abort ();
1023 }
1024 }
1025
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1028
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1033 {
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1045 return;
1046 }
1047
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1050 {
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1054 {
1055 emit_unop_insn (code, to, from, equiv_code);
1056 return;
1057 }
1058 else
1059 {
1060 enum machine_mode intermediate;
1061 rtx tmp;
1062 tree shift_amount;
1063
1064 /* Search for a mode to convert via. */
1065 for (intermediate = from_mode; intermediate != VOIDmode;
1066 intermediate = GET_MODE_WIDER_MODE (intermediate))
1067 if (((can_extend_p (to_mode, intermediate, unsignedp)
1068 != CODE_FOR_nothing)
1069 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1070 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1071 GET_MODE_BITSIZE (intermediate))))
1072 && (can_extend_p (intermediate, from_mode, unsignedp)
1073 != CODE_FOR_nothing))
1074 {
1075 convert_move (to, convert_to_mode (intermediate, from,
1076 unsignedp), unsignedp);
1077 return;
1078 }
1079
1080 /* No suitable intermediate mode.
1081 Generate what we need with shifts. */
1082 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1083 - GET_MODE_BITSIZE (from_mode), 0);
1084 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1085 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1086 to, unsignedp);
1087 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1088 to, unsignedp);
1089 if (tmp != to)
1090 emit_move_insn (to, tmp);
1091 return;
1092 }
1093 }
1094
1095 /* Support special truncate insns for certain modes. */
1096
1097 if (from_mode == DImode && to_mode == SImode)
1098 {
1099 #ifdef HAVE_truncdisi2
1100 if (HAVE_truncdisi2)
1101 {
1102 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1103 return;
1104 }
1105 #endif
1106 convert_move (to, force_reg (from_mode, from), unsignedp);
1107 return;
1108 }
1109
1110 if (from_mode == DImode && to_mode == HImode)
1111 {
1112 #ifdef HAVE_truncdihi2
1113 if (HAVE_truncdihi2)
1114 {
1115 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1116 return;
1117 }
1118 #endif
1119 convert_move (to, force_reg (from_mode, from), unsignedp);
1120 return;
1121 }
1122
1123 if (from_mode == DImode && to_mode == QImode)
1124 {
1125 #ifdef HAVE_truncdiqi2
1126 if (HAVE_truncdiqi2)
1127 {
1128 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1129 return;
1130 }
1131 #endif
1132 convert_move (to, force_reg (from_mode, from), unsignedp);
1133 return;
1134 }
1135
1136 if (from_mode == SImode && to_mode == HImode)
1137 {
1138 #ifdef HAVE_truncsihi2
1139 if (HAVE_truncsihi2)
1140 {
1141 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1142 return;
1143 }
1144 #endif
1145 convert_move (to, force_reg (from_mode, from), unsignedp);
1146 return;
1147 }
1148
1149 if (from_mode == SImode && to_mode == QImode)
1150 {
1151 #ifdef HAVE_truncsiqi2
1152 if (HAVE_truncsiqi2)
1153 {
1154 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1155 return;
1156 }
1157 #endif
1158 convert_move (to, force_reg (from_mode, from), unsignedp);
1159 return;
1160 }
1161
1162 if (from_mode == HImode && to_mode == QImode)
1163 {
1164 #ifdef HAVE_trunchiqi2
1165 if (HAVE_trunchiqi2)
1166 {
1167 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1168 return;
1169 }
1170 #endif
1171 convert_move (to, force_reg (from_mode, from), unsignedp);
1172 return;
1173 }
1174
1175 if (from_mode == TImode && to_mode == DImode)
1176 {
1177 #ifdef HAVE_trunctidi2
1178 if (HAVE_trunctidi2)
1179 {
1180 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1181 return;
1182 }
1183 #endif
1184 convert_move (to, force_reg (from_mode, from), unsignedp);
1185 return;
1186 }
1187
1188 if (from_mode == TImode && to_mode == SImode)
1189 {
1190 #ifdef HAVE_trunctisi2
1191 if (HAVE_trunctisi2)
1192 {
1193 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1194 return;
1195 }
1196 #endif
1197 convert_move (to, force_reg (from_mode, from), unsignedp);
1198 return;
1199 }
1200
1201 if (from_mode == TImode && to_mode == HImode)
1202 {
1203 #ifdef HAVE_trunctihi2
1204 if (HAVE_trunctihi2)
1205 {
1206 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1207 return;
1208 }
1209 #endif
1210 convert_move (to, force_reg (from_mode, from), unsignedp);
1211 return;
1212 }
1213
1214 if (from_mode == TImode && to_mode == QImode)
1215 {
1216 #ifdef HAVE_trunctiqi2
1217 if (HAVE_trunctiqi2)
1218 {
1219 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1220 return;
1221 }
1222 #endif
1223 convert_move (to, force_reg (from_mode, from), unsignedp);
1224 return;
1225 }
1226
1227 /* Handle truncation of volatile memrefs, and so on;
1228 the things that couldn't be truncated directly,
1229 and for which there was no special instruction. */
1230 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1231 {
1232 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1233 emit_move_insn (to, temp);
1234 return;
1235 }
1236
1237 /* Mode combination is not recognized. */
1238 abort ();
1239 }
1240
1241 /* Return an rtx for a value that would result
1242 from converting X to mode MODE.
1243 Both X and MODE may be floating, or both integer.
1244 UNSIGNEDP is nonzero if X is an unsigned value.
1245 This can be done by referring to a part of X in place
1246 or by copying to a new temporary with conversion.
1247
1248 This function *must not* call protect_from_queue
1249 except when putting X into an insn (in which case convert_move does it). */
1250
1251 rtx
1252 convert_to_mode (mode, x, unsignedp)
1253 enum machine_mode mode;
1254 rtx x;
1255 int unsignedp;
1256 {
1257 return convert_modes (mode, VOIDmode, x, unsignedp);
1258 }
1259
1260 /* Return an rtx for a value that would result
1261 from converting X from mode OLDMODE to mode MODE.
1262 Both modes may be floating, or both integer.
1263 UNSIGNEDP is nonzero if X is an unsigned value.
1264
1265 This can be done by referring to a part of X in place
1266 or by copying to a new temporary with conversion.
1267
1268 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1269
1270 This function *must not* call protect_from_queue
1271 except when putting X into an insn (in which case convert_move does it). */
1272
1273 rtx
1274 convert_modes (mode, oldmode, x, unsignedp)
1275 enum machine_mode mode, oldmode;
1276 rtx x;
1277 int unsignedp;
1278 {
1279 rtx temp;
1280
1281 /* If FROM is a SUBREG that indicates that we have already done at least
1282 the required extension, strip it. */
1283
1284 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1285 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1286 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1287 x = gen_lowpart (mode, x);
1288
1289 if (GET_MODE (x) != VOIDmode)
1290 oldmode = GET_MODE (x);
1291
1292 if (mode == oldmode)
1293 return x;
1294
1295 /* There is one case that we must handle specially: If we are converting
1296 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1297 we are to interpret the constant as unsigned, gen_lowpart will do
1298 the wrong if the constant appears negative. What we want to do is
1299 make the high-order word of the constant zero, not all ones. */
1300
1301 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1302 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1303 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1304 {
1305 HOST_WIDE_INT val = INTVAL (x);
1306
1307 if (oldmode != VOIDmode
1308 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1309 {
1310 int width = GET_MODE_BITSIZE (oldmode);
1311
1312 /* We need to zero extend VAL. */
1313 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1314 }
1315
1316 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1317 }
1318
1319 /* We can do this with a gen_lowpart if both desired and current modes
1320 are integer, and this is either a constant integer, a register, or a
1321 non-volatile MEM. Except for the constant case where MODE is no
1322 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1323
1324 if ((GET_CODE (x) == CONST_INT
1325 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1326 || (GET_MODE_CLASS (mode) == MODE_INT
1327 && GET_MODE_CLASS (oldmode) == MODE_INT
1328 && (GET_CODE (x) == CONST_DOUBLE
1329 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1330 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1331 && direct_load[(int) mode])
1332 || (GET_CODE (x) == REG
1333 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1334 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1335 {
1336 /* ?? If we don't know OLDMODE, we have to assume here that
1337 X does not need sign- or zero-extension. This may not be
1338 the case, but it's the best we can do. */
1339 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1340 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1341 {
1342 HOST_WIDE_INT val = INTVAL (x);
1343 int width = GET_MODE_BITSIZE (oldmode);
1344
1345 /* We must sign or zero-extend in this case. Start by
1346 zero-extending, then sign extend if we need to. */
1347 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1348 if (! unsignedp
1349 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1350 val |= (HOST_WIDE_INT) (-1) << width;
1351
1352 return GEN_INT (trunc_int_for_mode (val, mode));
1353 }
1354
1355 return gen_lowpart (mode, x);
1356 }
1357
1358 temp = gen_reg_rtx (mode);
1359 convert_move (temp, x, unsignedp);
1360 return temp;
1361 }
1362 \f
1363 /* This macro is used to determine what the largest unit size that
1364 move_by_pieces can use is. */
1365
1366 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1367 move efficiently, as opposed to MOVE_MAX which is the maximum
1368 number of bytes we can move with a single instruction. */
1369
1370 #ifndef MOVE_MAX_PIECES
1371 #define MOVE_MAX_PIECES MOVE_MAX
1372 #endif
1373
1374 /* Generate several move instructions to copy LEN bytes from block FROM to
1375 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1376 and TO through protect_from_queue before calling.
1377
1378 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1379 used to push FROM to the stack.
1380
1381 ALIGN is maximum alignment we can assume. */
1382
1383 void
1384 move_by_pieces (to, from, len, align)
1385 rtx to, from;
1386 unsigned HOST_WIDE_INT len;
1387 unsigned int align;
1388 {
1389 struct move_by_pieces data;
1390 rtx to_addr, from_addr = XEXP (from, 0);
1391 unsigned int max_size = MOVE_MAX_PIECES + 1;
1392 enum machine_mode mode = VOIDmode, tmode;
1393 enum insn_code icode;
1394
1395 data.offset = 0;
1396 data.from_addr = from_addr;
1397 if (to)
1398 {
1399 to_addr = XEXP (to, 0);
1400 data.to = to;
1401 data.autinc_to
1402 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1403 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1404 data.reverse
1405 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1406 }
1407 else
1408 {
1409 to_addr = NULL_RTX;
1410 data.to = NULL_RTX;
1411 data.autinc_to = 1;
1412 #ifdef STACK_GROWS_DOWNWARD
1413 data.reverse = 1;
1414 #else
1415 data.reverse = 0;
1416 #endif
1417 }
1418 data.to_addr = to_addr;
1419 data.from = from;
1420 data.autinc_from
1421 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1422 || GET_CODE (from_addr) == POST_INC
1423 || GET_CODE (from_addr) == POST_DEC);
1424
1425 data.explicit_inc_from = 0;
1426 data.explicit_inc_to = 0;
1427 if (data.reverse) data.offset = len;
1428 data.len = len;
1429
1430 /* If copying requires more than two move insns,
1431 copy addresses to registers (to make displacements shorter)
1432 and use post-increment if available. */
1433 if (!(data.autinc_from && data.autinc_to)
1434 && move_by_pieces_ninsns (len, align) > 2)
1435 {
1436 /* Find the mode of the largest move... */
1437 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1438 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1439 if (GET_MODE_SIZE (tmode) < max_size)
1440 mode = tmode;
1441
1442 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1443 {
1444 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1445 data.autinc_from = 1;
1446 data.explicit_inc_from = -1;
1447 }
1448 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1449 {
1450 data.from_addr = copy_addr_to_reg (from_addr);
1451 data.autinc_from = 1;
1452 data.explicit_inc_from = 1;
1453 }
1454 if (!data.autinc_from && CONSTANT_P (from_addr))
1455 data.from_addr = copy_addr_to_reg (from_addr);
1456 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1457 {
1458 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1459 data.autinc_to = 1;
1460 data.explicit_inc_to = -1;
1461 }
1462 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1463 {
1464 data.to_addr = copy_addr_to_reg (to_addr);
1465 data.autinc_to = 1;
1466 data.explicit_inc_to = 1;
1467 }
1468 if (!data.autinc_to && CONSTANT_P (to_addr))
1469 data.to_addr = copy_addr_to_reg (to_addr);
1470 }
1471
1472 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1473 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1474 align = MOVE_MAX * BITS_PER_UNIT;
1475
1476 /* First move what we can in the largest integer mode, then go to
1477 successively smaller modes. */
1478
1479 while (max_size > 1)
1480 {
1481 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1482 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1483 if (GET_MODE_SIZE (tmode) < max_size)
1484 mode = tmode;
1485
1486 if (mode == VOIDmode)
1487 break;
1488
1489 icode = mov_optab->handlers[(int) mode].insn_code;
1490 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1491 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1492
1493 max_size = GET_MODE_SIZE (mode);
1494 }
1495
1496 /* The code above should have handled everything. */
1497 if (data.len > 0)
1498 abort ();
1499 }
1500
1501 /* Return number of insns required to move L bytes by pieces.
1502 ALIGN (in bits) is maximum alignment we can assume. */
1503
1504 static unsigned HOST_WIDE_INT
1505 move_by_pieces_ninsns (l, align)
1506 unsigned HOST_WIDE_INT l;
1507 unsigned int align;
1508 {
1509 unsigned HOST_WIDE_INT n_insns = 0;
1510 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1511
1512 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1513 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1514 align = MOVE_MAX * BITS_PER_UNIT;
1515
1516 while (max_size > 1)
1517 {
1518 enum machine_mode mode = VOIDmode, tmode;
1519 enum insn_code icode;
1520
1521 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1522 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1523 if (GET_MODE_SIZE (tmode) < max_size)
1524 mode = tmode;
1525
1526 if (mode == VOIDmode)
1527 break;
1528
1529 icode = mov_optab->handlers[(int) mode].insn_code;
1530 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1531 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1532
1533 max_size = GET_MODE_SIZE (mode);
1534 }
1535
1536 if (l)
1537 abort ();
1538 return n_insns;
1539 }
1540
1541 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1542 with move instructions for mode MODE. GENFUN is the gen_... function
1543 to make a move insn for that mode. DATA has all the other info. */
1544
1545 static void
1546 move_by_pieces_1 (genfun, mode, data)
1547 rtx (*genfun) PARAMS ((rtx, ...));
1548 enum machine_mode mode;
1549 struct move_by_pieces *data;
1550 {
1551 unsigned int size = GET_MODE_SIZE (mode);
1552 rtx to1 = NULL_RTX, from1;
1553
1554 while (data->len >= size)
1555 {
1556 if (data->reverse)
1557 data->offset -= size;
1558
1559 if (data->to)
1560 {
1561 if (data->autinc_to)
1562 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1563 data->offset);
1564 else
1565 to1 = adjust_address (data->to, mode, data->offset);
1566 }
1567
1568 if (data->autinc_from)
1569 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1570 data->offset);
1571 else
1572 from1 = adjust_address (data->from, mode, data->offset);
1573
1574 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1575 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1576 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1577 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1578
1579 if (data->to)
1580 emit_insn ((*genfun) (to1, from1));
1581 else
1582 {
1583 #ifdef PUSH_ROUNDING
1584 emit_single_push_insn (mode, from1, NULL);
1585 #else
1586 abort ();
1587 #endif
1588 }
1589
1590 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1591 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1592 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1593 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1594
1595 if (! data->reverse)
1596 data->offset += size;
1597
1598 data->len -= size;
1599 }
1600 }
1601 \f
1602 /* Emit code to move a block Y to a block X.
1603 This may be done with string-move instructions,
1604 with multiple scalar move instructions, or with a library call.
1605
1606 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1607 with mode BLKmode.
1608 SIZE is an rtx that says how long they are.
1609 ALIGN is the maximum alignment we can assume they have.
1610
1611 Return the address of the new block, if memcpy is called and returns it,
1612 0 otherwise. */
1613
1614 rtx
1615 emit_block_move (x, y, size)
1616 rtx x, y;
1617 rtx size;
1618 {
1619 rtx retval = 0;
1620 #ifdef TARGET_MEM_FUNCTIONS
1621 static tree fn;
1622 tree call_expr, arg_list;
1623 #endif
1624 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1625
1626 if (GET_MODE (x) != BLKmode)
1627 abort ();
1628
1629 if (GET_MODE (y) != BLKmode)
1630 abort ();
1631
1632 x = protect_from_queue (x, 1);
1633 y = protect_from_queue (y, 0);
1634 size = protect_from_queue (size, 0);
1635
1636 if (GET_CODE (x) != MEM)
1637 abort ();
1638 if (GET_CODE (y) != MEM)
1639 abort ();
1640 if (size == 0)
1641 abort ();
1642
1643 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1644 move_by_pieces (x, y, INTVAL (size), align);
1645 else
1646 {
1647 /* Try the most limited insn first, because there's no point
1648 including more than one in the machine description unless
1649 the more limited one has some advantage. */
1650
1651 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1652 enum machine_mode mode;
1653
1654 /* Since this is a move insn, we don't care about volatility. */
1655 volatile_ok = 1;
1656
1657 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1658 mode = GET_MODE_WIDER_MODE (mode))
1659 {
1660 enum insn_code code = movstr_optab[(int) mode];
1661 insn_operand_predicate_fn pred;
1662
1663 if (code != CODE_FOR_nothing
1664 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1665 here because if SIZE is less than the mode mask, as it is
1666 returned by the macro, it will definitely be less than the
1667 actual mode mask. */
1668 && ((GET_CODE (size) == CONST_INT
1669 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1670 <= (GET_MODE_MASK (mode) >> 1)))
1671 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1672 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1673 || (*pred) (x, BLKmode))
1674 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1675 || (*pred) (y, BLKmode))
1676 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1677 || (*pred) (opalign, VOIDmode)))
1678 {
1679 rtx op2;
1680 rtx last = get_last_insn ();
1681 rtx pat;
1682
1683 op2 = convert_to_mode (mode, size, 1);
1684 pred = insn_data[(int) code].operand[2].predicate;
1685 if (pred != 0 && ! (*pred) (op2, mode))
1686 op2 = copy_to_mode_reg (mode, op2);
1687
1688 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1689 if (pat)
1690 {
1691 emit_insn (pat);
1692 volatile_ok = 0;
1693 return 0;
1694 }
1695 else
1696 delete_insns_since (last);
1697 }
1698 }
1699
1700 volatile_ok = 0;
1701
1702 /* X, Y, or SIZE may have been passed through protect_from_queue.
1703
1704 It is unsafe to save the value generated by protect_from_queue
1705 and reuse it later. Consider what happens if emit_queue is
1706 called before the return value from protect_from_queue is used.
1707
1708 Expansion of the CALL_EXPR below will call emit_queue before
1709 we are finished emitting RTL for argument setup. So if we are
1710 not careful we could get the wrong value for an argument.
1711
1712 To avoid this problem we go ahead and emit code to copy X, Y &
1713 SIZE into new pseudos. We can then place those new pseudos
1714 into an RTL_EXPR and use them later, even after a call to
1715 emit_queue.
1716
1717 Note this is not strictly needed for library calls since they
1718 do not call emit_queue before loading their arguments. However,
1719 we may need to have library calls call emit_queue in the future
1720 since failing to do so could cause problems for targets which
1721 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1722 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1723 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1724
1725 #ifdef TARGET_MEM_FUNCTIONS
1726 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1727 #else
1728 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1729 TREE_UNSIGNED (integer_type_node));
1730 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1731 #endif
1732
1733 #ifdef TARGET_MEM_FUNCTIONS
1734 /* It is incorrect to use the libcall calling conventions to call
1735 memcpy in this context.
1736
1737 This could be a user call to memcpy and the user may wish to
1738 examine the return value from memcpy.
1739
1740 For targets where libcalls and normal calls have different conventions
1741 for returning pointers, we could end up generating incorrect code.
1742
1743 So instead of using a libcall sequence we build up a suitable
1744 CALL_EXPR and expand the call in the normal fashion. */
1745 if (fn == NULL_TREE)
1746 {
1747 tree fntype;
1748
1749 /* This was copied from except.c, I don't know if all this is
1750 necessary in this context or not. */
1751 fn = get_identifier ("memcpy");
1752 fntype = build_pointer_type (void_type_node);
1753 fntype = build_function_type (fntype, NULL_TREE);
1754 fn = build_decl (FUNCTION_DECL, fn, fntype);
1755 ggc_add_tree_root (&fn, 1);
1756 DECL_EXTERNAL (fn) = 1;
1757 TREE_PUBLIC (fn) = 1;
1758 DECL_ARTIFICIAL (fn) = 1;
1759 TREE_NOTHROW (fn) = 1;
1760 make_decl_rtl (fn, NULL);
1761 assemble_external (fn);
1762 }
1763
1764 /* We need to make an argument list for the function call.
1765
1766 memcpy has three arguments, the first two are void * addresses and
1767 the last is a size_t byte count for the copy. */
1768 arg_list
1769 = build_tree_list (NULL_TREE,
1770 make_tree (build_pointer_type (void_type_node), x));
1771 TREE_CHAIN (arg_list)
1772 = build_tree_list (NULL_TREE,
1773 make_tree (build_pointer_type (void_type_node), y));
1774 TREE_CHAIN (TREE_CHAIN (arg_list))
1775 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1776 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1777
1778 /* Now we have to build up the CALL_EXPR itself. */
1779 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1780 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1781 call_expr, arg_list, NULL_TREE);
1782 TREE_SIDE_EFFECTS (call_expr) = 1;
1783
1784 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1785 #else
1786 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1787 VOIDmode, 3, y, Pmode, x, Pmode,
1788 convert_to_mode (TYPE_MODE (integer_type_node), size,
1789 TREE_UNSIGNED (integer_type_node)),
1790 TYPE_MODE (integer_type_node));
1791 #endif
1792
1793 /* If we are initializing a readonly value, show the above call
1794 clobbered it. Otherwise, a load from it may erroneously be hoisted
1795 from a loop. */
1796 if (RTX_UNCHANGING_P (x))
1797 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1798 }
1799
1800 return retval;
1801 }
1802 \f
1803 /* Copy all or part of a value X into registers starting at REGNO.
1804 The number of registers to be filled is NREGS. */
1805
1806 void
1807 move_block_to_reg (regno, x, nregs, mode)
1808 int regno;
1809 rtx x;
1810 int nregs;
1811 enum machine_mode mode;
1812 {
1813 int i;
1814 #ifdef HAVE_load_multiple
1815 rtx pat;
1816 rtx last;
1817 #endif
1818
1819 if (nregs == 0)
1820 return;
1821
1822 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1823 x = validize_mem (force_const_mem (mode, x));
1824
1825 /* See if the machine can do this with a load multiple insn. */
1826 #ifdef HAVE_load_multiple
1827 if (HAVE_load_multiple)
1828 {
1829 last = get_last_insn ();
1830 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1831 GEN_INT (nregs));
1832 if (pat)
1833 {
1834 emit_insn (pat);
1835 return;
1836 }
1837 else
1838 delete_insns_since (last);
1839 }
1840 #endif
1841
1842 for (i = 0; i < nregs; i++)
1843 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1844 operand_subword_force (x, i, mode));
1845 }
1846
1847 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1848 The number of registers to be filled is NREGS. SIZE indicates the number
1849 of bytes in the object X. */
1850
1851 void
1852 move_block_from_reg (regno, x, nregs, size)
1853 int regno;
1854 rtx x;
1855 int nregs;
1856 int size;
1857 {
1858 int i;
1859 #ifdef HAVE_store_multiple
1860 rtx pat;
1861 rtx last;
1862 #endif
1863 enum machine_mode mode;
1864
1865 if (nregs == 0)
1866 return;
1867
1868 /* If SIZE is that of a mode no bigger than a word, just use that
1869 mode's store operation. */
1870 if (size <= UNITS_PER_WORD
1871 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1872 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1873 {
1874 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1875 return;
1876 }
1877
1878 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1879 to the left before storing to memory. Note that the previous test
1880 doesn't handle all cases (e.g. SIZE == 3). */
1881 if (size < UNITS_PER_WORD
1882 && BYTES_BIG_ENDIAN
1883 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1884 {
1885 rtx tem = operand_subword (x, 0, 1, BLKmode);
1886 rtx shift;
1887
1888 if (tem == 0)
1889 abort ();
1890
1891 shift = expand_shift (LSHIFT_EXPR, word_mode,
1892 gen_rtx_REG (word_mode, regno),
1893 build_int_2 ((UNITS_PER_WORD - size)
1894 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1895 emit_move_insn (tem, shift);
1896 return;
1897 }
1898
1899 /* See if the machine can do this with a store multiple insn. */
1900 #ifdef HAVE_store_multiple
1901 if (HAVE_store_multiple)
1902 {
1903 last = get_last_insn ();
1904 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1905 GEN_INT (nregs));
1906 if (pat)
1907 {
1908 emit_insn (pat);
1909 return;
1910 }
1911 else
1912 delete_insns_since (last);
1913 }
1914 #endif
1915
1916 for (i = 0; i < nregs; i++)
1917 {
1918 rtx tem = operand_subword (x, i, 1, BLKmode);
1919
1920 if (tem == 0)
1921 abort ();
1922
1923 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1924 }
1925 }
1926
1927 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1928 registers represented by a PARALLEL. SSIZE represents the total size of
1929 block SRC in bytes, or -1 if not known. */
1930 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1931 the balance will be in what would be the low-order memory addresses, i.e.
1932 left justified for big endian, right justified for little endian. This
1933 happens to be true for the targets currently using this support. If this
1934 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1935 would be needed. */
1936
1937 void
1938 emit_group_load (dst, orig_src, ssize)
1939 rtx dst, orig_src;
1940 int ssize;
1941 {
1942 rtx *tmps, src;
1943 int start, i;
1944
1945 if (GET_CODE (dst) != PARALLEL)
1946 abort ();
1947
1948 /* Check for a NULL entry, used to indicate that the parameter goes
1949 both on the stack and in registers. */
1950 if (XEXP (XVECEXP (dst, 0, 0), 0))
1951 start = 0;
1952 else
1953 start = 1;
1954
1955 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1956
1957 /* Process the pieces. */
1958 for (i = start; i < XVECLEN (dst, 0); i++)
1959 {
1960 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1961 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1962 unsigned int bytelen = GET_MODE_SIZE (mode);
1963 int shift = 0;
1964
1965 /* Handle trailing fragments that run over the size of the struct. */
1966 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1967 {
1968 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1969 bytelen = ssize - bytepos;
1970 if (bytelen <= 0)
1971 abort ();
1972 }
1973
1974 /* If we won't be loading directly from memory, protect the real source
1975 from strange tricks we might play; but make sure that the source can
1976 be loaded directly into the destination. */
1977 src = orig_src;
1978 if (GET_CODE (orig_src) != MEM
1979 && (!CONSTANT_P (orig_src)
1980 || (GET_MODE (orig_src) != mode
1981 && GET_MODE (orig_src) != VOIDmode)))
1982 {
1983 if (GET_MODE (orig_src) == VOIDmode)
1984 src = gen_reg_rtx (mode);
1985 else
1986 src = gen_reg_rtx (GET_MODE (orig_src));
1987
1988 emit_move_insn (src, orig_src);
1989 }
1990
1991 /* Optimize the access just a bit. */
1992 if (GET_CODE (src) == MEM
1993 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
1994 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1995 && bytelen == GET_MODE_SIZE (mode))
1996 {
1997 tmps[i] = gen_reg_rtx (mode);
1998 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1999 }
2000 else if (GET_CODE (src) == CONCAT)
2001 {
2002 if (bytepos == 0
2003 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2004 tmps[i] = XEXP (src, 0);
2005 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2006 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2007 tmps[i] = XEXP (src, 1);
2008 else if (bytepos == 0)
2009 {
2010 rtx mem = assign_stack_temp (GET_MODE (src),
2011 GET_MODE_SIZE (GET_MODE (src)), 0);
2012 emit_move_insn (mem, src);
2013 tmps[i] = adjust_address (mem, mode, 0);
2014 }
2015 else
2016 abort ();
2017 }
2018 else if (CONSTANT_P (src)
2019 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2020 tmps[i] = src;
2021 else
2022 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2023 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2024 mode, mode, ssize);
2025
2026 if (BYTES_BIG_ENDIAN && shift)
2027 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2028 tmps[i], 0, OPTAB_WIDEN);
2029 }
2030
2031 emit_queue ();
2032
2033 /* Copy the extracted pieces into the proper (probable) hard regs. */
2034 for (i = start; i < XVECLEN (dst, 0); i++)
2035 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2036 }
2037
2038 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2039 registers represented by a PARALLEL. SSIZE represents the total size of
2040 block DST, or -1 if not known. */
2041
2042 void
2043 emit_group_store (orig_dst, src, ssize)
2044 rtx orig_dst, src;
2045 int ssize;
2046 {
2047 rtx *tmps, dst;
2048 int start, i;
2049
2050 if (GET_CODE (src) != PARALLEL)
2051 abort ();
2052
2053 /* Check for a NULL entry, used to indicate that the parameter goes
2054 both on the stack and in registers. */
2055 if (XEXP (XVECEXP (src, 0, 0), 0))
2056 start = 0;
2057 else
2058 start = 1;
2059
2060 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2061
2062 /* Copy the (probable) hard regs into pseudos. */
2063 for (i = start; i < XVECLEN (src, 0); i++)
2064 {
2065 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2066 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2067 emit_move_insn (tmps[i], reg);
2068 }
2069 emit_queue ();
2070
2071 /* If we won't be storing directly into memory, protect the real destination
2072 from strange tricks we might play. */
2073 dst = orig_dst;
2074 if (GET_CODE (dst) == PARALLEL)
2075 {
2076 rtx temp;
2077
2078 /* We can get a PARALLEL dst if there is a conditional expression in
2079 a return statement. In that case, the dst and src are the same,
2080 so no action is necessary. */
2081 if (rtx_equal_p (dst, src))
2082 return;
2083
2084 /* It is unclear if we can ever reach here, but we may as well handle
2085 it. Allocate a temporary, and split this into a store/load to/from
2086 the temporary. */
2087
2088 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2089 emit_group_store (temp, src, ssize);
2090 emit_group_load (dst, temp, ssize);
2091 return;
2092 }
2093 else if (GET_CODE (dst) != MEM)
2094 {
2095 dst = gen_reg_rtx (GET_MODE (orig_dst));
2096 /* Make life a bit easier for combine. */
2097 emit_move_insn (dst, const0_rtx);
2098 }
2099
2100 /* Process the pieces. */
2101 for (i = start; i < XVECLEN (src, 0); i++)
2102 {
2103 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2104 enum machine_mode mode = GET_MODE (tmps[i]);
2105 unsigned int bytelen = GET_MODE_SIZE (mode);
2106
2107 /* Handle trailing fragments that run over the size of the struct. */
2108 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2109 {
2110 if (BYTES_BIG_ENDIAN)
2111 {
2112 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2113 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2114 tmps[i], 0, OPTAB_WIDEN);
2115 }
2116 bytelen = ssize - bytepos;
2117 }
2118
2119 /* Optimize the access just a bit. */
2120 if (GET_CODE (dst) == MEM
2121 && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
2122 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2123 && bytelen == GET_MODE_SIZE (mode))
2124 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2125 else
2126 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2127 mode, tmps[i], ssize);
2128 }
2129
2130 emit_queue ();
2131
2132 /* Copy from the pseudo into the (probable) hard reg. */
2133 if (GET_CODE (dst) == REG)
2134 emit_move_insn (orig_dst, dst);
2135 }
2136
2137 /* Generate code to copy a BLKmode object of TYPE out of a
2138 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2139 is null, a stack temporary is created. TGTBLK is returned.
2140
2141 The primary purpose of this routine is to handle functions
2142 that return BLKmode structures in registers. Some machines
2143 (the PA for example) want to return all small structures
2144 in registers regardless of the structure's alignment. */
2145
2146 rtx
2147 copy_blkmode_from_reg (tgtblk, srcreg, type)
2148 rtx tgtblk;
2149 rtx srcreg;
2150 tree type;
2151 {
2152 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2153 rtx src = NULL, dst = NULL;
2154 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2155 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2156
2157 if (tgtblk == 0)
2158 {
2159 tgtblk = assign_temp (build_qualified_type (type,
2160 (TYPE_QUALS (type)
2161 | TYPE_QUAL_CONST)),
2162 0, 1, 1);
2163 preserve_temp_slots (tgtblk);
2164 }
2165
2166 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2167 into a new pseudo which is a full word.
2168
2169 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2170 the wrong part of the register gets copied so we fake a type conversion
2171 in place. */
2172 if (GET_MODE (srcreg) != BLKmode
2173 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2174 {
2175 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2176 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2177 else
2178 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2179 }
2180
2181 /* Structures whose size is not a multiple of a word are aligned
2182 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2183 machine, this means we must skip the empty high order bytes when
2184 calculating the bit offset. */
2185 if (BYTES_BIG_ENDIAN
2186 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2187 && bytes % UNITS_PER_WORD)
2188 big_endian_correction
2189 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2190
2191 /* Copy the structure BITSIZE bites at a time.
2192
2193 We could probably emit more efficient code for machines which do not use
2194 strict alignment, but it doesn't seem worth the effort at the current
2195 time. */
2196 for (bitpos = 0, xbitpos = big_endian_correction;
2197 bitpos < bytes * BITS_PER_UNIT;
2198 bitpos += bitsize, xbitpos += bitsize)
2199 {
2200 /* We need a new source operand each time xbitpos is on a
2201 word boundary and when xbitpos == big_endian_correction
2202 (the first time through). */
2203 if (xbitpos % BITS_PER_WORD == 0
2204 || xbitpos == big_endian_correction)
2205 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2206 GET_MODE (srcreg));
2207
2208 /* We need a new destination operand each time bitpos is on
2209 a word boundary. */
2210 if (bitpos % BITS_PER_WORD == 0)
2211 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2212
2213 /* Use xbitpos for the source extraction (right justified) and
2214 xbitpos for the destination store (left justified). */
2215 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2216 extract_bit_field (src, bitsize,
2217 xbitpos % BITS_PER_WORD, 1,
2218 NULL_RTX, word_mode, word_mode,
2219 BITS_PER_WORD),
2220 BITS_PER_WORD);
2221 }
2222
2223 return tgtblk;
2224 }
2225
2226 /* Add a USE expression for REG to the (possibly empty) list pointed
2227 to by CALL_FUSAGE. REG must denote a hard register. */
2228
2229 void
2230 use_reg (call_fusage, reg)
2231 rtx *call_fusage, reg;
2232 {
2233 if (GET_CODE (reg) != REG
2234 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2235 abort ();
2236
2237 *call_fusage
2238 = gen_rtx_EXPR_LIST (VOIDmode,
2239 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2240 }
2241
2242 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2243 starting at REGNO. All of these registers must be hard registers. */
2244
2245 void
2246 use_regs (call_fusage, regno, nregs)
2247 rtx *call_fusage;
2248 int regno;
2249 int nregs;
2250 {
2251 int i;
2252
2253 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2254 abort ();
2255
2256 for (i = 0; i < nregs; i++)
2257 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2258 }
2259
2260 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2261 PARALLEL REGS. This is for calls that pass values in multiple
2262 non-contiguous locations. The Irix 6 ABI has examples of this. */
2263
2264 void
2265 use_group_regs (call_fusage, regs)
2266 rtx *call_fusage;
2267 rtx regs;
2268 {
2269 int i;
2270
2271 for (i = 0; i < XVECLEN (regs, 0); i++)
2272 {
2273 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2274
2275 /* A NULL entry means the parameter goes both on the stack and in
2276 registers. This can also be a MEM for targets that pass values
2277 partially on the stack and partially in registers. */
2278 if (reg != 0 && GET_CODE (reg) == REG)
2279 use_reg (call_fusage, reg);
2280 }
2281 }
2282 \f
2283
2284 int
2285 can_store_by_pieces (len, constfun, constfundata, align)
2286 unsigned HOST_WIDE_INT len;
2287 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2288 PTR constfundata;
2289 unsigned int align;
2290 {
2291 unsigned HOST_WIDE_INT max_size, l;
2292 HOST_WIDE_INT offset = 0;
2293 enum machine_mode mode, tmode;
2294 enum insn_code icode;
2295 int reverse;
2296 rtx cst;
2297
2298 if (! MOVE_BY_PIECES_P (len, align))
2299 return 0;
2300
2301 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2302 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2303 align = MOVE_MAX * BITS_PER_UNIT;
2304
2305 /* We would first store what we can in the largest integer mode, then go to
2306 successively smaller modes. */
2307
2308 for (reverse = 0;
2309 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2310 reverse++)
2311 {
2312 l = len;
2313 mode = VOIDmode;
2314 max_size = MOVE_MAX_PIECES + 1;
2315 while (max_size > 1)
2316 {
2317 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2318 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2319 if (GET_MODE_SIZE (tmode) < max_size)
2320 mode = tmode;
2321
2322 if (mode == VOIDmode)
2323 break;
2324
2325 icode = mov_optab->handlers[(int) mode].insn_code;
2326 if (icode != CODE_FOR_nothing
2327 && align >= GET_MODE_ALIGNMENT (mode))
2328 {
2329 unsigned int size = GET_MODE_SIZE (mode);
2330
2331 while (l >= size)
2332 {
2333 if (reverse)
2334 offset -= size;
2335
2336 cst = (*constfun) (constfundata, offset, mode);
2337 if (!LEGITIMATE_CONSTANT_P (cst))
2338 return 0;
2339
2340 if (!reverse)
2341 offset += size;
2342
2343 l -= size;
2344 }
2345 }
2346
2347 max_size = GET_MODE_SIZE (mode);
2348 }
2349
2350 /* The code above should have handled everything. */
2351 if (l != 0)
2352 abort ();
2353 }
2354
2355 return 1;
2356 }
2357
2358 /* Generate several move instructions to store LEN bytes generated by
2359 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2360 pointer which will be passed as argument in every CONSTFUN call.
2361 ALIGN is maximum alignment we can assume. */
2362
2363 void
2364 store_by_pieces (to, len, constfun, constfundata, align)
2365 rtx to;
2366 unsigned HOST_WIDE_INT len;
2367 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2368 PTR constfundata;
2369 unsigned int align;
2370 {
2371 struct store_by_pieces data;
2372
2373 if (! MOVE_BY_PIECES_P (len, align))
2374 abort ();
2375 to = protect_from_queue (to, 1);
2376 data.constfun = constfun;
2377 data.constfundata = constfundata;
2378 data.len = len;
2379 data.to = to;
2380 store_by_pieces_1 (&data, align);
2381 }
2382
2383 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2384 rtx with BLKmode). The caller must pass TO through protect_from_queue
2385 before calling. ALIGN is maximum alignment we can assume. */
2386
2387 static void
2388 clear_by_pieces (to, len, align)
2389 rtx to;
2390 unsigned HOST_WIDE_INT len;
2391 unsigned int align;
2392 {
2393 struct store_by_pieces data;
2394
2395 data.constfun = clear_by_pieces_1;
2396 data.constfundata = NULL;
2397 data.len = len;
2398 data.to = to;
2399 store_by_pieces_1 (&data, align);
2400 }
2401
2402 /* Callback routine for clear_by_pieces.
2403 Return const0_rtx unconditionally. */
2404
2405 static rtx
2406 clear_by_pieces_1 (data, offset, mode)
2407 PTR data ATTRIBUTE_UNUSED;
2408 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2409 enum machine_mode mode ATTRIBUTE_UNUSED;
2410 {
2411 return const0_rtx;
2412 }
2413
2414 /* Subroutine of clear_by_pieces and store_by_pieces.
2415 Generate several move instructions to store LEN bytes of block TO. (A MEM
2416 rtx with BLKmode). The caller must pass TO through protect_from_queue
2417 before calling. ALIGN is maximum alignment we can assume. */
2418
2419 static void
2420 store_by_pieces_1 (data, align)
2421 struct store_by_pieces *data;
2422 unsigned int align;
2423 {
2424 rtx to_addr = XEXP (data->to, 0);
2425 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2426 enum machine_mode mode = VOIDmode, tmode;
2427 enum insn_code icode;
2428
2429 data->offset = 0;
2430 data->to_addr = to_addr;
2431 data->autinc_to
2432 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2433 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2434
2435 data->explicit_inc_to = 0;
2436 data->reverse
2437 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2438 if (data->reverse)
2439 data->offset = data->len;
2440
2441 /* If storing requires more than two move insns,
2442 copy addresses to registers (to make displacements shorter)
2443 and use post-increment if available. */
2444 if (!data->autinc_to
2445 && move_by_pieces_ninsns (data->len, align) > 2)
2446 {
2447 /* Determine the main mode we'll be using. */
2448 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2449 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2450 if (GET_MODE_SIZE (tmode) < max_size)
2451 mode = tmode;
2452
2453 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2454 {
2455 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2456 data->autinc_to = 1;
2457 data->explicit_inc_to = -1;
2458 }
2459
2460 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2461 && ! data->autinc_to)
2462 {
2463 data->to_addr = copy_addr_to_reg (to_addr);
2464 data->autinc_to = 1;
2465 data->explicit_inc_to = 1;
2466 }
2467
2468 if ( !data->autinc_to && CONSTANT_P (to_addr))
2469 data->to_addr = copy_addr_to_reg (to_addr);
2470 }
2471
2472 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2473 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2474 align = MOVE_MAX * BITS_PER_UNIT;
2475
2476 /* First store what we can in the largest integer mode, then go to
2477 successively smaller modes. */
2478
2479 while (max_size > 1)
2480 {
2481 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2482 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2483 if (GET_MODE_SIZE (tmode) < max_size)
2484 mode = tmode;
2485
2486 if (mode == VOIDmode)
2487 break;
2488
2489 icode = mov_optab->handlers[(int) mode].insn_code;
2490 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2491 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2492
2493 max_size = GET_MODE_SIZE (mode);
2494 }
2495
2496 /* The code above should have handled everything. */
2497 if (data->len != 0)
2498 abort ();
2499 }
2500
2501 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2502 with move instructions for mode MODE. GENFUN is the gen_... function
2503 to make a move insn for that mode. DATA has all the other info. */
2504
2505 static void
2506 store_by_pieces_2 (genfun, mode, data)
2507 rtx (*genfun) PARAMS ((rtx, ...));
2508 enum machine_mode mode;
2509 struct store_by_pieces *data;
2510 {
2511 unsigned int size = GET_MODE_SIZE (mode);
2512 rtx to1, cst;
2513
2514 while (data->len >= size)
2515 {
2516 if (data->reverse)
2517 data->offset -= size;
2518
2519 if (data->autinc_to)
2520 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2521 data->offset);
2522 else
2523 to1 = adjust_address (data->to, mode, data->offset);
2524
2525 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2526 emit_insn (gen_add2_insn (data->to_addr,
2527 GEN_INT (-(HOST_WIDE_INT) size)));
2528
2529 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2530 emit_insn ((*genfun) (to1, cst));
2531
2532 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2533 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2534
2535 if (! data->reverse)
2536 data->offset += size;
2537
2538 data->len -= size;
2539 }
2540 }
2541 \f
2542 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2543 its length in bytes. */
2544
2545 rtx
2546 clear_storage (object, size)
2547 rtx object;
2548 rtx size;
2549 {
2550 #ifdef TARGET_MEM_FUNCTIONS
2551 static tree fn;
2552 tree call_expr, arg_list;
2553 #endif
2554 rtx retval = 0;
2555 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2556 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2557
2558 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2559 just move a zero. Otherwise, do this a piece at a time. */
2560 if (GET_MODE (object) != BLKmode
2561 && GET_CODE (size) == CONST_INT
2562 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2563 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2564 else
2565 {
2566 object = protect_from_queue (object, 1);
2567 size = protect_from_queue (size, 0);
2568
2569 if (GET_CODE (size) == CONST_INT
2570 && MOVE_BY_PIECES_P (INTVAL (size), align))
2571 clear_by_pieces (object, INTVAL (size), align);
2572 else
2573 {
2574 /* Try the most limited insn first, because there's no point
2575 including more than one in the machine description unless
2576 the more limited one has some advantage. */
2577
2578 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2579 enum machine_mode mode;
2580
2581 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2582 mode = GET_MODE_WIDER_MODE (mode))
2583 {
2584 enum insn_code code = clrstr_optab[(int) mode];
2585 insn_operand_predicate_fn pred;
2586
2587 if (code != CODE_FOR_nothing
2588 /* We don't need MODE to be narrower than
2589 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2590 the mode mask, as it is returned by the macro, it will
2591 definitely be less than the actual mode mask. */
2592 && ((GET_CODE (size) == CONST_INT
2593 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2594 <= (GET_MODE_MASK (mode) >> 1)))
2595 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2596 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2597 || (*pred) (object, BLKmode))
2598 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2599 || (*pred) (opalign, VOIDmode)))
2600 {
2601 rtx op1;
2602 rtx last = get_last_insn ();
2603 rtx pat;
2604
2605 op1 = convert_to_mode (mode, size, 1);
2606 pred = insn_data[(int) code].operand[1].predicate;
2607 if (pred != 0 && ! (*pred) (op1, mode))
2608 op1 = copy_to_mode_reg (mode, op1);
2609
2610 pat = GEN_FCN ((int) code) (object, op1, opalign);
2611 if (pat)
2612 {
2613 emit_insn (pat);
2614 return 0;
2615 }
2616 else
2617 delete_insns_since (last);
2618 }
2619 }
2620
2621 /* OBJECT or SIZE may have been passed through protect_from_queue.
2622
2623 It is unsafe to save the value generated by protect_from_queue
2624 and reuse it later. Consider what happens if emit_queue is
2625 called before the return value from protect_from_queue is used.
2626
2627 Expansion of the CALL_EXPR below will call emit_queue before
2628 we are finished emitting RTL for argument setup. So if we are
2629 not careful we could get the wrong value for an argument.
2630
2631 To avoid this problem we go ahead and emit code to copy OBJECT
2632 and SIZE into new pseudos. We can then place those new pseudos
2633 into an RTL_EXPR and use them later, even after a call to
2634 emit_queue.
2635
2636 Note this is not strictly needed for library calls since they
2637 do not call emit_queue before loading their arguments. However,
2638 we may need to have library calls call emit_queue in the future
2639 since failing to do so could cause problems for targets which
2640 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2641 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2642
2643 #ifdef TARGET_MEM_FUNCTIONS
2644 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2645 #else
2646 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2647 TREE_UNSIGNED (integer_type_node));
2648 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2649 #endif
2650
2651 #ifdef TARGET_MEM_FUNCTIONS
2652 /* It is incorrect to use the libcall calling conventions to call
2653 memset in this context.
2654
2655 This could be a user call to memset and the user may wish to
2656 examine the return value from memset.
2657
2658 For targets where libcalls and normal calls have different
2659 conventions for returning pointers, we could end up generating
2660 incorrect code.
2661
2662 So instead of using a libcall sequence we build up a suitable
2663 CALL_EXPR and expand the call in the normal fashion. */
2664 if (fn == NULL_TREE)
2665 {
2666 tree fntype;
2667
2668 /* This was copied from except.c, I don't know if all this is
2669 necessary in this context or not. */
2670 fn = get_identifier ("memset");
2671 fntype = build_pointer_type (void_type_node);
2672 fntype = build_function_type (fntype, NULL_TREE);
2673 fn = build_decl (FUNCTION_DECL, fn, fntype);
2674 ggc_add_tree_root (&fn, 1);
2675 DECL_EXTERNAL (fn) = 1;
2676 TREE_PUBLIC (fn) = 1;
2677 DECL_ARTIFICIAL (fn) = 1;
2678 TREE_NOTHROW (fn) = 1;
2679 make_decl_rtl (fn, NULL);
2680 assemble_external (fn);
2681 }
2682
2683 /* We need to make an argument list for the function call.
2684
2685 memset has three arguments, the first is a void * addresses, the
2686 second an integer with the initialization value, the last is a
2687 size_t byte count for the copy. */
2688 arg_list
2689 = build_tree_list (NULL_TREE,
2690 make_tree (build_pointer_type (void_type_node),
2691 object));
2692 TREE_CHAIN (arg_list)
2693 = build_tree_list (NULL_TREE,
2694 make_tree (integer_type_node, const0_rtx));
2695 TREE_CHAIN (TREE_CHAIN (arg_list))
2696 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2697 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2698
2699 /* Now we have to build up the CALL_EXPR itself. */
2700 call_expr = build1 (ADDR_EXPR,
2701 build_pointer_type (TREE_TYPE (fn)), fn);
2702 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2703 call_expr, arg_list, NULL_TREE);
2704 TREE_SIDE_EFFECTS (call_expr) = 1;
2705
2706 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2707 #else
2708 emit_library_call (bzero_libfunc, LCT_NORMAL,
2709 VOIDmode, 2, object, Pmode, size,
2710 TYPE_MODE (integer_type_node));
2711 #endif
2712
2713 /* If we are initializing a readonly value, show the above call
2714 clobbered it. Otherwise, a load from it may erroneously be
2715 hoisted from a loop. */
2716 if (RTX_UNCHANGING_P (object))
2717 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2718 }
2719 }
2720
2721 return retval;
2722 }
2723
2724 /* Generate code to copy Y into X.
2725 Both Y and X must have the same mode, except that
2726 Y can be a constant with VOIDmode.
2727 This mode cannot be BLKmode; use emit_block_move for that.
2728
2729 Return the last instruction emitted. */
2730
2731 rtx
2732 emit_move_insn (x, y)
2733 rtx x, y;
2734 {
2735 enum machine_mode mode = GET_MODE (x);
2736 rtx y_cst = NULL_RTX;
2737 rtx last_insn;
2738
2739 x = protect_from_queue (x, 1);
2740 y = protect_from_queue (y, 0);
2741
2742 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2743 abort ();
2744
2745 /* Never force constant_p_rtx to memory. */
2746 if (GET_CODE (y) == CONSTANT_P_RTX)
2747 ;
2748 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2749 {
2750 y_cst = y;
2751 y = force_const_mem (mode, y);
2752 }
2753
2754 /* If X or Y are memory references, verify that their addresses are valid
2755 for the machine. */
2756 if (GET_CODE (x) == MEM
2757 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2758 && ! push_operand (x, GET_MODE (x)))
2759 || (flag_force_addr
2760 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2761 x = validize_mem (x);
2762
2763 if (GET_CODE (y) == MEM
2764 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2765 || (flag_force_addr
2766 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2767 y = validize_mem (y);
2768
2769 if (mode == BLKmode)
2770 abort ();
2771
2772 last_insn = emit_move_insn_1 (x, y);
2773
2774 if (y_cst && GET_CODE (x) == REG)
2775 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2776
2777 return last_insn;
2778 }
2779
2780 /* Low level part of emit_move_insn.
2781 Called just like emit_move_insn, but assumes X and Y
2782 are basically valid. */
2783
2784 rtx
2785 emit_move_insn_1 (x, y)
2786 rtx x, y;
2787 {
2788 enum machine_mode mode = GET_MODE (x);
2789 enum machine_mode submode;
2790 enum mode_class class = GET_MODE_CLASS (mode);
2791 unsigned int i;
2792
2793 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2794 abort ();
2795
2796 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2797 return
2798 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2799
2800 /* Expand complex moves by moving real part and imag part, if possible. */
2801 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2802 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2803 * BITS_PER_UNIT),
2804 (class == MODE_COMPLEX_INT
2805 ? MODE_INT : MODE_FLOAT),
2806 0))
2807 && (mov_optab->handlers[(int) submode].insn_code
2808 != CODE_FOR_nothing))
2809 {
2810 /* Don't split destination if it is a stack push. */
2811 int stack = push_operand (x, GET_MODE (x));
2812
2813 #ifdef PUSH_ROUNDING
2814 /* In case we output to the stack, but the size is smaller machine can
2815 push exactly, we need to use move instructions. */
2816 if (stack
2817 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2818 {
2819 rtx temp;
2820 int offset1, offset2;
2821
2822 /* Do not use anti_adjust_stack, since we don't want to update
2823 stack_pointer_delta. */
2824 temp = expand_binop (Pmode,
2825 #ifdef STACK_GROWS_DOWNWARD
2826 sub_optab,
2827 #else
2828 add_optab,
2829 #endif
2830 stack_pointer_rtx,
2831 GEN_INT
2832 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2833 stack_pointer_rtx,
2834 0,
2835 OPTAB_LIB_WIDEN);
2836 if (temp != stack_pointer_rtx)
2837 emit_move_insn (stack_pointer_rtx, temp);
2838 #ifdef STACK_GROWS_DOWNWARD
2839 offset1 = 0;
2840 offset2 = GET_MODE_SIZE (submode);
2841 #else
2842 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2843 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2844 + GET_MODE_SIZE (submode));
2845 #endif
2846 emit_move_insn (change_address (x, submode,
2847 gen_rtx_PLUS (Pmode,
2848 stack_pointer_rtx,
2849 GEN_INT (offset1))),
2850 gen_realpart (submode, y));
2851 emit_move_insn (change_address (x, submode,
2852 gen_rtx_PLUS (Pmode,
2853 stack_pointer_rtx,
2854 GEN_INT (offset2))),
2855 gen_imagpart (submode, y));
2856 }
2857 else
2858 #endif
2859 /* If this is a stack, push the highpart first, so it
2860 will be in the argument order.
2861
2862 In that case, change_address is used only to convert
2863 the mode, not to change the address. */
2864 if (stack)
2865 {
2866 /* Note that the real part always precedes the imag part in memory
2867 regardless of machine's endianness. */
2868 #ifdef STACK_GROWS_DOWNWARD
2869 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2870 (gen_rtx_MEM (submode, XEXP (x, 0)),
2871 gen_imagpart (submode, y)));
2872 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2873 (gen_rtx_MEM (submode, XEXP (x, 0)),
2874 gen_realpart (submode, y)));
2875 #else
2876 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2877 (gen_rtx_MEM (submode, XEXP (x, 0)),
2878 gen_realpart (submode, y)));
2879 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2880 (gen_rtx_MEM (submode, XEXP (x, 0)),
2881 gen_imagpart (submode, y)));
2882 #endif
2883 }
2884 else
2885 {
2886 rtx realpart_x, realpart_y;
2887 rtx imagpart_x, imagpart_y;
2888
2889 /* If this is a complex value with each part being smaller than a
2890 word, the usual calling sequence will likely pack the pieces into
2891 a single register. Unfortunately, SUBREG of hard registers only
2892 deals in terms of words, so we have a problem converting input
2893 arguments to the CONCAT of two registers that is used elsewhere
2894 for complex values. If this is before reload, we can copy it into
2895 memory and reload. FIXME, we should see about using extract and
2896 insert on integer registers, but complex short and complex char
2897 variables should be rarely used. */
2898 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2899 && (reload_in_progress | reload_completed) == 0)
2900 {
2901 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2902 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2903
2904 if (packed_dest_p || packed_src_p)
2905 {
2906 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2907 ? MODE_FLOAT : MODE_INT);
2908
2909 enum machine_mode reg_mode
2910 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2911
2912 if (reg_mode != BLKmode)
2913 {
2914 rtx mem = assign_stack_temp (reg_mode,
2915 GET_MODE_SIZE (mode), 0);
2916 rtx cmem = adjust_address (mem, mode, 0);
2917
2918 cfun->cannot_inline
2919 = N_("function using short complex types cannot be inline");
2920
2921 if (packed_dest_p)
2922 {
2923 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2924 emit_move_insn_1 (cmem, y);
2925 return emit_move_insn_1 (sreg, mem);
2926 }
2927 else
2928 {
2929 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2930 emit_move_insn_1 (mem, sreg);
2931 return emit_move_insn_1 (x, cmem);
2932 }
2933 }
2934 }
2935 }
2936
2937 realpart_x = gen_realpart (submode, x);
2938 realpart_y = gen_realpart (submode, y);
2939 imagpart_x = gen_imagpart (submode, x);
2940 imagpart_y = gen_imagpart (submode, y);
2941
2942 /* Show the output dies here. This is necessary for SUBREGs
2943 of pseudos since we cannot track their lifetimes correctly;
2944 hard regs shouldn't appear here except as return values.
2945 We never want to emit such a clobber after reload. */
2946 if (x != y
2947 && ! (reload_in_progress || reload_completed)
2948 && (GET_CODE (realpart_x) == SUBREG
2949 || GET_CODE (imagpart_x) == SUBREG))
2950 {
2951 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2952 }
2953
2954 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2955 (realpart_x, realpart_y));
2956 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2957 (imagpart_x, imagpart_y));
2958 }
2959
2960 return get_last_insn ();
2961 }
2962
2963 /* This will handle any multi-word mode that lacks a move_insn pattern.
2964 However, you will get better code if you define such patterns,
2965 even if they must turn into multiple assembler instructions. */
2966 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2967 {
2968 rtx last_insn = 0;
2969 rtx seq, inner;
2970 int need_clobber;
2971
2972 #ifdef PUSH_ROUNDING
2973
2974 /* If X is a push on the stack, do the push now and replace
2975 X with a reference to the stack pointer. */
2976 if (push_operand (x, GET_MODE (x)))
2977 {
2978 rtx temp;
2979 enum rtx_code code;
2980
2981 /* Do not use anti_adjust_stack, since we don't want to update
2982 stack_pointer_delta. */
2983 temp = expand_binop (Pmode,
2984 #ifdef STACK_GROWS_DOWNWARD
2985 sub_optab,
2986 #else
2987 add_optab,
2988 #endif
2989 stack_pointer_rtx,
2990 GEN_INT
2991 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2992 stack_pointer_rtx,
2993 0,
2994 OPTAB_LIB_WIDEN);
2995 if (temp != stack_pointer_rtx)
2996 emit_move_insn (stack_pointer_rtx, temp);
2997
2998 code = GET_CODE (XEXP (x, 0));
2999 /* Just hope that small offsets off SP are OK. */
3000 if (code == POST_INC)
3001 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3002 GEN_INT (-(HOST_WIDE_INT)
3003 GET_MODE_SIZE (GET_MODE (x))));
3004 else if (code == POST_DEC)
3005 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3006 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3007 else
3008 temp = stack_pointer_rtx;
3009
3010 x = change_address (x, VOIDmode, temp);
3011 }
3012 #endif
3013
3014 /* If we are in reload, see if either operand is a MEM whose address
3015 is scheduled for replacement. */
3016 if (reload_in_progress && GET_CODE (x) == MEM
3017 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3018 x = replace_equiv_address_nv (x, inner);
3019 if (reload_in_progress && GET_CODE (y) == MEM
3020 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3021 y = replace_equiv_address_nv (y, inner);
3022
3023 start_sequence ();
3024
3025 need_clobber = 0;
3026 for (i = 0;
3027 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3028 i++)
3029 {
3030 rtx xpart = operand_subword (x, i, 1, mode);
3031 rtx ypart = operand_subword (y, i, 1, mode);
3032
3033 /* If we can't get a part of Y, put Y into memory if it is a
3034 constant. Otherwise, force it into a register. If we still
3035 can't get a part of Y, abort. */
3036 if (ypart == 0 && CONSTANT_P (y))
3037 {
3038 y = force_const_mem (mode, y);
3039 ypart = operand_subword (y, i, 1, mode);
3040 }
3041 else if (ypart == 0)
3042 ypart = operand_subword_force (y, i, mode);
3043
3044 if (xpart == 0 || ypart == 0)
3045 abort ();
3046
3047 need_clobber |= (GET_CODE (xpart) == SUBREG);
3048
3049 last_insn = emit_move_insn (xpart, ypart);
3050 }
3051
3052 seq = gen_sequence ();
3053 end_sequence ();
3054
3055 /* Show the output dies here. This is necessary for SUBREGs
3056 of pseudos since we cannot track their lifetimes correctly;
3057 hard regs shouldn't appear here except as return values.
3058 We never want to emit such a clobber after reload. */
3059 if (x != y
3060 && ! (reload_in_progress || reload_completed)
3061 && need_clobber != 0)
3062 {
3063 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3064 }
3065
3066 emit_insn (seq);
3067
3068 return last_insn;
3069 }
3070 else
3071 abort ();
3072 }
3073 \f
3074 /* Pushing data onto the stack. */
3075
3076 /* Push a block of length SIZE (perhaps variable)
3077 and return an rtx to address the beginning of the block.
3078 Note that it is not possible for the value returned to be a QUEUED.
3079 The value may be virtual_outgoing_args_rtx.
3080
3081 EXTRA is the number of bytes of padding to push in addition to SIZE.
3082 BELOW nonzero means this padding comes at low addresses;
3083 otherwise, the padding comes at high addresses. */
3084
3085 rtx
3086 push_block (size, extra, below)
3087 rtx size;
3088 int extra, below;
3089 {
3090 rtx temp;
3091
3092 size = convert_modes (Pmode, ptr_mode, size, 1);
3093 if (CONSTANT_P (size))
3094 anti_adjust_stack (plus_constant (size, extra));
3095 else if (GET_CODE (size) == REG && extra == 0)
3096 anti_adjust_stack (size);
3097 else
3098 {
3099 temp = copy_to_mode_reg (Pmode, size);
3100 if (extra != 0)
3101 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3102 temp, 0, OPTAB_LIB_WIDEN);
3103 anti_adjust_stack (temp);
3104 }
3105
3106 #ifndef STACK_GROWS_DOWNWARD
3107 if (0)
3108 #else
3109 if (1)
3110 #endif
3111 {
3112 temp = virtual_outgoing_args_rtx;
3113 if (extra != 0 && below)
3114 temp = plus_constant (temp, extra);
3115 }
3116 else
3117 {
3118 if (GET_CODE (size) == CONST_INT)
3119 temp = plus_constant (virtual_outgoing_args_rtx,
3120 -INTVAL (size) - (below ? 0 : extra));
3121 else if (extra != 0 && !below)
3122 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3123 negate_rtx (Pmode, plus_constant (size, extra)));
3124 else
3125 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3126 negate_rtx (Pmode, size));
3127 }
3128
3129 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3130 }
3131
3132 #ifdef PUSH_ROUNDING
3133
3134 /* Emit single push insn. */
3135
3136 static void
3137 emit_single_push_insn (mode, x, type)
3138 rtx x;
3139 enum machine_mode mode;
3140 tree type;
3141 {
3142 rtx dest_addr;
3143 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3144 rtx dest;
3145 enum insn_code icode;
3146 insn_operand_predicate_fn pred;
3147
3148 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3149 /* If there is push pattern, use it. Otherwise try old way of throwing
3150 MEM representing push operation to move expander. */
3151 icode = push_optab->handlers[(int) mode].insn_code;
3152 if (icode != CODE_FOR_nothing)
3153 {
3154 if (((pred = insn_data[(int) icode].operand[0].predicate)
3155 && !((*pred) (x, mode))))
3156 x = force_reg (mode, x);
3157 emit_insn (GEN_FCN (icode) (x));
3158 return;
3159 }
3160 if (GET_MODE_SIZE (mode) == rounded_size)
3161 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3162 else
3163 {
3164 #ifdef STACK_GROWS_DOWNWARD
3165 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3166 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3167 #else
3168 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3169 GEN_INT (rounded_size));
3170 #endif
3171 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3172 }
3173
3174 dest = gen_rtx_MEM (mode, dest_addr);
3175
3176 if (type != 0)
3177 {
3178 set_mem_attributes (dest, type, 1);
3179
3180 if (flag_optimize_sibling_calls)
3181 /* Function incoming arguments may overlap with sibling call
3182 outgoing arguments and we cannot allow reordering of reads
3183 from function arguments with stores to outgoing arguments
3184 of sibling calls. */
3185 set_mem_alias_set (dest, 0);
3186 }
3187 emit_move_insn (dest, x);
3188 }
3189 #endif
3190
3191 /* Generate code to push X onto the stack, assuming it has mode MODE and
3192 type TYPE.
3193 MODE is redundant except when X is a CONST_INT (since they don't
3194 carry mode info).
3195 SIZE is an rtx for the size of data to be copied (in bytes),
3196 needed only if X is BLKmode.
3197
3198 ALIGN (in bits) is maximum alignment we can assume.
3199
3200 If PARTIAL and REG are both nonzero, then copy that many of the first
3201 words of X into registers starting with REG, and push the rest of X.
3202 The amount of space pushed is decreased by PARTIAL words,
3203 rounded *down* to a multiple of PARM_BOUNDARY.
3204 REG must be a hard register in this case.
3205 If REG is zero but PARTIAL is not, take any all others actions for an
3206 argument partially in registers, but do not actually load any
3207 registers.
3208
3209 EXTRA is the amount in bytes of extra space to leave next to this arg.
3210 This is ignored if an argument block has already been allocated.
3211
3212 On a machine that lacks real push insns, ARGS_ADDR is the address of
3213 the bottom of the argument block for this call. We use indexing off there
3214 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3215 argument block has not been preallocated.
3216
3217 ARGS_SO_FAR is the size of args previously pushed for this call.
3218
3219 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3220 for arguments passed in registers. If nonzero, it will be the number
3221 of bytes required. */
3222
3223 void
3224 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3225 args_addr, args_so_far, reg_parm_stack_space,
3226 alignment_pad)
3227 rtx x;
3228 enum machine_mode mode;
3229 tree type;
3230 rtx size;
3231 unsigned int align;
3232 int partial;
3233 rtx reg;
3234 int extra;
3235 rtx args_addr;
3236 rtx args_so_far;
3237 int reg_parm_stack_space;
3238 rtx alignment_pad;
3239 {
3240 rtx xinner;
3241 enum direction stack_direction
3242 #ifdef STACK_GROWS_DOWNWARD
3243 = downward;
3244 #else
3245 = upward;
3246 #endif
3247
3248 /* Decide where to pad the argument: `downward' for below,
3249 `upward' for above, or `none' for don't pad it.
3250 Default is below for small data on big-endian machines; else above. */
3251 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3252
3253 /* Invert direction if stack is post-decrement.
3254 FIXME: why? */
3255 if (STACK_PUSH_CODE == POST_DEC)
3256 if (where_pad != none)
3257 where_pad = (where_pad == downward ? upward : downward);
3258
3259 xinner = x = protect_from_queue (x, 0);
3260
3261 if (mode == BLKmode)
3262 {
3263 /* Copy a block into the stack, entirely or partially. */
3264
3265 rtx temp;
3266 int used = partial * UNITS_PER_WORD;
3267 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3268 int skip;
3269
3270 if (size == 0)
3271 abort ();
3272
3273 used -= offset;
3274
3275 /* USED is now the # of bytes we need not copy to the stack
3276 because registers will take care of them. */
3277
3278 if (partial != 0)
3279 xinner = adjust_address (xinner, BLKmode, used);
3280
3281 /* If the partial register-part of the arg counts in its stack size,
3282 skip the part of stack space corresponding to the registers.
3283 Otherwise, start copying to the beginning of the stack space,
3284 by setting SKIP to 0. */
3285 skip = (reg_parm_stack_space == 0) ? 0 : used;
3286
3287 #ifdef PUSH_ROUNDING
3288 /* Do it with several push insns if that doesn't take lots of insns
3289 and if there is no difficulty with push insns that skip bytes
3290 on the stack for alignment purposes. */
3291 if (args_addr == 0
3292 && PUSH_ARGS
3293 && GET_CODE (size) == CONST_INT
3294 && skip == 0
3295 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3296 /* Here we avoid the case of a structure whose weak alignment
3297 forces many pushes of a small amount of data,
3298 and such small pushes do rounding that causes trouble. */
3299 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3300 || align >= BIGGEST_ALIGNMENT
3301 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3302 == (align / BITS_PER_UNIT)))
3303 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3304 {
3305 /* Push padding now if padding above and stack grows down,
3306 or if padding below and stack grows up.
3307 But if space already allocated, this has already been done. */
3308 if (extra && args_addr == 0
3309 && where_pad != none && where_pad != stack_direction)
3310 anti_adjust_stack (GEN_INT (extra));
3311
3312 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3313 }
3314 else
3315 #endif /* PUSH_ROUNDING */
3316 {
3317 rtx target;
3318
3319 /* Otherwise make space on the stack and copy the data
3320 to the address of that space. */
3321
3322 /* Deduct words put into registers from the size we must copy. */
3323 if (partial != 0)
3324 {
3325 if (GET_CODE (size) == CONST_INT)
3326 size = GEN_INT (INTVAL (size) - used);
3327 else
3328 size = expand_binop (GET_MODE (size), sub_optab, size,
3329 GEN_INT (used), NULL_RTX, 0,
3330 OPTAB_LIB_WIDEN);
3331 }
3332
3333 /* Get the address of the stack space.
3334 In this case, we do not deal with EXTRA separately.
3335 A single stack adjust will do. */
3336 if (! args_addr)
3337 {
3338 temp = push_block (size, extra, where_pad == downward);
3339 extra = 0;
3340 }
3341 else if (GET_CODE (args_so_far) == CONST_INT)
3342 temp = memory_address (BLKmode,
3343 plus_constant (args_addr,
3344 skip + INTVAL (args_so_far)));
3345 else
3346 temp = memory_address (BLKmode,
3347 plus_constant (gen_rtx_PLUS (Pmode,
3348 args_addr,
3349 args_so_far),
3350 skip));
3351 target = gen_rtx_MEM (BLKmode, temp);
3352
3353 if (type != 0)
3354 {
3355 set_mem_attributes (target, type, 1);
3356 /* Function incoming arguments may overlap with sibling call
3357 outgoing arguments and we cannot allow reordering of reads
3358 from function arguments with stores to outgoing arguments
3359 of sibling calls. */
3360 set_mem_alias_set (target, 0);
3361 }
3362 else
3363 set_mem_align (target, align);
3364
3365 /* TEMP is the address of the block. Copy the data there. */
3366 if (GET_CODE (size) == CONST_INT
3367 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3368 {
3369 move_by_pieces (target, xinner, INTVAL (size), align);
3370 goto ret;
3371 }
3372 else
3373 {
3374 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3375 enum machine_mode mode;
3376
3377 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3378 mode != VOIDmode;
3379 mode = GET_MODE_WIDER_MODE (mode))
3380 {
3381 enum insn_code code = movstr_optab[(int) mode];
3382 insn_operand_predicate_fn pred;
3383
3384 if (code != CODE_FOR_nothing
3385 && ((GET_CODE (size) == CONST_INT
3386 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3387 <= (GET_MODE_MASK (mode) >> 1)))
3388 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3389 && (!(pred = insn_data[(int) code].operand[0].predicate)
3390 || ((*pred) (target, BLKmode)))
3391 && (!(pred = insn_data[(int) code].operand[1].predicate)
3392 || ((*pred) (xinner, BLKmode)))
3393 && (!(pred = insn_data[(int) code].operand[3].predicate)
3394 || ((*pred) (opalign, VOIDmode))))
3395 {
3396 rtx op2 = convert_to_mode (mode, size, 1);
3397 rtx last = get_last_insn ();
3398 rtx pat;
3399
3400 pred = insn_data[(int) code].operand[2].predicate;
3401 if (pred != 0 && ! (*pred) (op2, mode))
3402 op2 = copy_to_mode_reg (mode, op2);
3403
3404 pat = GEN_FCN ((int) code) (target, xinner,
3405 op2, opalign);
3406 if (pat)
3407 {
3408 emit_insn (pat);
3409 goto ret;
3410 }
3411 else
3412 delete_insns_since (last);
3413 }
3414 }
3415 }
3416
3417 if (!ACCUMULATE_OUTGOING_ARGS)
3418 {
3419 /* If the source is referenced relative to the stack pointer,
3420 copy it to another register to stabilize it. We do not need
3421 to do this if we know that we won't be changing sp. */
3422
3423 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3424 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3425 temp = copy_to_reg (temp);
3426 }
3427
3428 /* Make inhibit_defer_pop nonzero around the library call
3429 to force it to pop the bcopy-arguments right away. */
3430 NO_DEFER_POP;
3431 #ifdef TARGET_MEM_FUNCTIONS
3432 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3433 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3434 convert_to_mode (TYPE_MODE (sizetype),
3435 size, TREE_UNSIGNED (sizetype)),
3436 TYPE_MODE (sizetype));
3437 #else
3438 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3439 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3440 convert_to_mode (TYPE_MODE (integer_type_node),
3441 size,
3442 TREE_UNSIGNED (integer_type_node)),
3443 TYPE_MODE (integer_type_node));
3444 #endif
3445 OK_DEFER_POP;
3446 }
3447 }
3448 else if (partial > 0)
3449 {
3450 /* Scalar partly in registers. */
3451
3452 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3453 int i;
3454 int not_stack;
3455 /* # words of start of argument
3456 that we must make space for but need not store. */
3457 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3458 int args_offset = INTVAL (args_so_far);
3459 int skip;
3460
3461 /* Push padding now if padding above and stack grows down,
3462 or if padding below and stack grows up.
3463 But if space already allocated, this has already been done. */
3464 if (extra && args_addr == 0
3465 && where_pad != none && where_pad != stack_direction)
3466 anti_adjust_stack (GEN_INT (extra));
3467
3468 /* If we make space by pushing it, we might as well push
3469 the real data. Otherwise, we can leave OFFSET nonzero
3470 and leave the space uninitialized. */
3471 if (args_addr == 0)
3472 offset = 0;
3473
3474 /* Now NOT_STACK gets the number of words that we don't need to
3475 allocate on the stack. */
3476 not_stack = partial - offset;
3477
3478 /* If the partial register-part of the arg counts in its stack size,
3479 skip the part of stack space corresponding to the registers.
3480 Otherwise, start copying to the beginning of the stack space,
3481 by setting SKIP to 0. */
3482 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3483
3484 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3485 x = validize_mem (force_const_mem (mode, x));
3486
3487 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3488 SUBREGs of such registers are not allowed. */
3489 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3490 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3491 x = copy_to_reg (x);
3492
3493 /* Loop over all the words allocated on the stack for this arg. */
3494 /* We can do it by words, because any scalar bigger than a word
3495 has a size a multiple of a word. */
3496 #ifndef PUSH_ARGS_REVERSED
3497 for (i = not_stack; i < size; i++)
3498 #else
3499 for (i = size - 1; i >= not_stack; i--)
3500 #endif
3501 if (i >= not_stack + offset)
3502 emit_push_insn (operand_subword_force (x, i, mode),
3503 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3504 0, args_addr,
3505 GEN_INT (args_offset + ((i - not_stack + skip)
3506 * UNITS_PER_WORD)),
3507 reg_parm_stack_space, alignment_pad);
3508 }
3509 else
3510 {
3511 rtx addr;
3512 rtx target = NULL_RTX;
3513 rtx dest;
3514
3515 /* Push padding now if padding above and stack grows down,
3516 or if padding below and stack grows up.
3517 But if space already allocated, this has already been done. */
3518 if (extra && args_addr == 0
3519 && where_pad != none && where_pad != stack_direction)
3520 anti_adjust_stack (GEN_INT (extra));
3521
3522 #ifdef PUSH_ROUNDING
3523 if (args_addr == 0 && PUSH_ARGS)
3524 emit_single_push_insn (mode, x, type);
3525 else
3526 #endif
3527 {
3528 if (GET_CODE (args_so_far) == CONST_INT)
3529 addr
3530 = memory_address (mode,
3531 plus_constant (args_addr,
3532 INTVAL (args_so_far)));
3533 else
3534 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3535 args_so_far));
3536 target = addr;
3537 dest = gen_rtx_MEM (mode, addr);
3538 if (type != 0)
3539 {
3540 set_mem_attributes (dest, type, 1);
3541 /* Function incoming arguments may overlap with sibling call
3542 outgoing arguments and we cannot allow reordering of reads
3543 from function arguments with stores to outgoing arguments
3544 of sibling calls. */
3545 set_mem_alias_set (dest, 0);
3546 }
3547
3548 emit_move_insn (dest, x);
3549 }
3550
3551 }
3552
3553 ret:
3554 /* If part should go in registers, copy that part
3555 into the appropriate registers. Do this now, at the end,
3556 since mem-to-mem copies above may do function calls. */
3557 if (partial > 0 && reg != 0)
3558 {
3559 /* Handle calls that pass values in multiple non-contiguous locations.
3560 The Irix 6 ABI has examples of this. */
3561 if (GET_CODE (reg) == PARALLEL)
3562 emit_group_load (reg, x, -1); /* ??? size? */
3563 else
3564 move_block_to_reg (REGNO (reg), x, partial, mode);
3565 }
3566
3567 if (extra && args_addr == 0 && where_pad == stack_direction)
3568 anti_adjust_stack (GEN_INT (extra));
3569
3570 if (alignment_pad && args_addr == 0)
3571 anti_adjust_stack (alignment_pad);
3572 }
3573 \f
3574 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3575 operations. */
3576
3577 static rtx
3578 get_subtarget (x)
3579 rtx x;
3580 {
3581 return ((x == 0
3582 /* Only registers can be subtargets. */
3583 || GET_CODE (x) != REG
3584 /* If the register is readonly, it can't be set more than once. */
3585 || RTX_UNCHANGING_P (x)
3586 /* Don't use hard regs to avoid extending their life. */
3587 || REGNO (x) < FIRST_PSEUDO_REGISTER
3588 /* Avoid subtargets inside loops,
3589 since they hide some invariant expressions. */
3590 || preserve_subexpressions_p ())
3591 ? 0 : x);
3592 }
3593
3594 /* Expand an assignment that stores the value of FROM into TO.
3595 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3596 (This may contain a QUEUED rtx;
3597 if the value is constant, this rtx is a constant.)
3598 Otherwise, the returned value is NULL_RTX.
3599
3600 SUGGEST_REG is no longer actually used.
3601 It used to mean, copy the value through a register
3602 and return that register, if that is possible.
3603 We now use WANT_VALUE to decide whether to do this. */
3604
3605 rtx
3606 expand_assignment (to, from, want_value, suggest_reg)
3607 tree to, from;
3608 int want_value;
3609 int suggest_reg ATTRIBUTE_UNUSED;
3610 {
3611 rtx to_rtx = 0;
3612 rtx result;
3613
3614 /* Don't crash if the lhs of the assignment was erroneous. */
3615
3616 if (TREE_CODE (to) == ERROR_MARK)
3617 {
3618 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3619 return want_value ? result : NULL_RTX;
3620 }
3621
3622 /* Assignment of a structure component needs special treatment
3623 if the structure component's rtx is not simply a MEM.
3624 Assignment of an array element at a constant index, and assignment of
3625 an array element in an unaligned packed structure field, has the same
3626 problem. */
3627
3628 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3629 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3630 {
3631 enum machine_mode mode1;
3632 HOST_WIDE_INT bitsize, bitpos;
3633 rtx orig_to_rtx;
3634 tree offset;
3635 int unsignedp;
3636 int volatilep = 0;
3637 tree tem;
3638
3639 push_temp_slots ();
3640 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3641 &unsignedp, &volatilep);
3642
3643 /* If we are going to use store_bit_field and extract_bit_field,
3644 make sure to_rtx will be safe for multiple use. */
3645
3646 if (mode1 == VOIDmode && want_value)
3647 tem = stabilize_reference (tem);
3648
3649 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3650
3651 if (offset != 0)
3652 {
3653 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3654
3655 if (GET_CODE (to_rtx) != MEM)
3656 abort ();
3657
3658 if (GET_MODE (offset_rtx) != ptr_mode)
3659 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3660
3661 #ifdef POINTERS_EXTEND_UNSIGNED
3662 if (GET_MODE (offset_rtx) != Pmode)
3663 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3664 #endif
3665
3666 /* A constant address in TO_RTX can have VOIDmode, we must not try
3667 to call force_reg for that case. Avoid that case. */
3668 if (GET_CODE (to_rtx) == MEM
3669 && GET_MODE (to_rtx) == BLKmode
3670 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3671 && bitsize > 0
3672 && (bitpos % bitsize) == 0
3673 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3674 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3675 {
3676 rtx temp
3677 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3678
3679 if (GET_CODE (XEXP (temp, 0)) == REG)
3680 to_rtx = temp;
3681 else
3682 to_rtx = (replace_equiv_address
3683 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3684 XEXP (temp, 0))));
3685 bitpos = 0;
3686 }
3687
3688 to_rtx = offset_address (to_rtx, offset_rtx,
3689 highest_pow2_factor (offset));
3690 }
3691
3692 if (GET_CODE (to_rtx) == MEM)
3693 {
3694 tree old_expr = MEM_EXPR (to_rtx);
3695
3696 /* If the field is at offset zero, we could have been given the
3697 DECL_RTX of the parent struct. Don't munge it. */
3698 to_rtx = shallow_copy_rtx (to_rtx);
3699
3700 set_mem_attributes (to_rtx, to, 0);
3701
3702 /* If we changed MEM_EXPR, that means we're now referencing
3703 the COMPONENT_REF, which means that MEM_OFFSET must be
3704 relative to that field. But we've not yet reflected BITPOS
3705 in TO_RTX. This will be done in store_field. Adjust for
3706 that by biasing MEM_OFFSET by -bitpos. */
3707 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3708 && (bitpos / BITS_PER_UNIT) != 0)
3709 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3710 - (bitpos / BITS_PER_UNIT)));
3711 }
3712
3713 /* Deal with volatile and readonly fields. The former is only done
3714 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3715 if (volatilep && GET_CODE (to_rtx) == MEM)
3716 {
3717 if (to_rtx == orig_to_rtx)
3718 to_rtx = copy_rtx (to_rtx);
3719 MEM_VOLATILE_P (to_rtx) = 1;
3720 }
3721
3722 if (TREE_CODE (to) == COMPONENT_REF
3723 && TREE_READONLY (TREE_OPERAND (to, 1)))
3724 {
3725 if (to_rtx == orig_to_rtx)
3726 to_rtx = copy_rtx (to_rtx);
3727 RTX_UNCHANGING_P (to_rtx) = 1;
3728 }
3729
3730 if (! can_address_p (to))
3731 {
3732 if (to_rtx == orig_to_rtx)
3733 to_rtx = copy_rtx (to_rtx);
3734 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3735 }
3736
3737 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3738 (want_value
3739 /* Spurious cast for HPUX compiler. */
3740 ? ((enum machine_mode)
3741 TYPE_MODE (TREE_TYPE (to)))
3742 : VOIDmode),
3743 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3744
3745 preserve_temp_slots (result);
3746 free_temp_slots ();
3747 pop_temp_slots ();
3748
3749 /* If the value is meaningful, convert RESULT to the proper mode.
3750 Otherwise, return nothing. */
3751 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3752 TYPE_MODE (TREE_TYPE (from)),
3753 result,
3754 TREE_UNSIGNED (TREE_TYPE (to)))
3755 : NULL_RTX);
3756 }
3757
3758 /* If the rhs is a function call and its value is not an aggregate,
3759 call the function before we start to compute the lhs.
3760 This is needed for correct code for cases such as
3761 val = setjmp (buf) on machines where reference to val
3762 requires loading up part of an address in a separate insn.
3763
3764 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3765 since it might be a promoted variable where the zero- or sign- extension
3766 needs to be done. Handling this in the normal way is safe because no
3767 computation is done before the call. */
3768 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3769 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3770 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3771 && GET_CODE (DECL_RTL (to)) == REG))
3772 {
3773 rtx value;
3774
3775 push_temp_slots ();
3776 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3777 if (to_rtx == 0)
3778 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3779
3780 /* Handle calls that return values in multiple non-contiguous locations.
3781 The Irix 6 ABI has examples of this. */
3782 if (GET_CODE (to_rtx) == PARALLEL)
3783 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3784 else if (GET_MODE (to_rtx) == BLKmode)
3785 emit_block_move (to_rtx, value, expr_size (from));
3786 else
3787 {
3788 #ifdef POINTERS_EXTEND_UNSIGNED
3789 if (POINTER_TYPE_P (TREE_TYPE (to))
3790 && GET_MODE (to_rtx) != GET_MODE (value))
3791 value = convert_memory_address (GET_MODE (to_rtx), value);
3792 #endif
3793 emit_move_insn (to_rtx, value);
3794 }
3795 preserve_temp_slots (to_rtx);
3796 free_temp_slots ();
3797 pop_temp_slots ();
3798 return want_value ? to_rtx : NULL_RTX;
3799 }
3800
3801 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3802 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3803
3804 if (to_rtx == 0)
3805 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3806
3807 /* Don't move directly into a return register. */
3808 if (TREE_CODE (to) == RESULT_DECL
3809 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3810 {
3811 rtx temp;
3812
3813 push_temp_slots ();
3814 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3815
3816 if (GET_CODE (to_rtx) == PARALLEL)
3817 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3818 else
3819 emit_move_insn (to_rtx, temp);
3820
3821 preserve_temp_slots (to_rtx);
3822 free_temp_slots ();
3823 pop_temp_slots ();
3824 return want_value ? to_rtx : NULL_RTX;
3825 }
3826
3827 /* In case we are returning the contents of an object which overlaps
3828 the place the value is being stored, use a safe function when copying
3829 a value through a pointer into a structure value return block. */
3830 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3831 && current_function_returns_struct
3832 && !current_function_returns_pcc_struct)
3833 {
3834 rtx from_rtx, size;
3835
3836 push_temp_slots ();
3837 size = expr_size (from);
3838 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3839
3840 #ifdef TARGET_MEM_FUNCTIONS
3841 emit_library_call (memmove_libfunc, LCT_NORMAL,
3842 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3843 XEXP (from_rtx, 0), Pmode,
3844 convert_to_mode (TYPE_MODE (sizetype),
3845 size, TREE_UNSIGNED (sizetype)),
3846 TYPE_MODE (sizetype));
3847 #else
3848 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3849 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3850 XEXP (to_rtx, 0), Pmode,
3851 convert_to_mode (TYPE_MODE (integer_type_node),
3852 size, TREE_UNSIGNED (integer_type_node)),
3853 TYPE_MODE (integer_type_node));
3854 #endif
3855
3856 preserve_temp_slots (to_rtx);
3857 free_temp_slots ();
3858 pop_temp_slots ();
3859 return want_value ? to_rtx : NULL_RTX;
3860 }
3861
3862 /* Compute FROM and store the value in the rtx we got. */
3863
3864 push_temp_slots ();
3865 result = store_expr (from, to_rtx, want_value);
3866 preserve_temp_slots (result);
3867 free_temp_slots ();
3868 pop_temp_slots ();
3869 return want_value ? result : NULL_RTX;
3870 }
3871
3872 /* Generate code for computing expression EXP,
3873 and storing the value into TARGET.
3874 TARGET may contain a QUEUED rtx.
3875
3876 If WANT_VALUE is nonzero, return a copy of the value
3877 not in TARGET, so that we can be sure to use the proper
3878 value in a containing expression even if TARGET has something
3879 else stored in it. If possible, we copy the value through a pseudo
3880 and return that pseudo. Or, if the value is constant, we try to
3881 return the constant. In some cases, we return a pseudo
3882 copied *from* TARGET.
3883
3884 If the mode is BLKmode then we may return TARGET itself.
3885 It turns out that in BLKmode it doesn't cause a problem.
3886 because C has no operators that could combine two different
3887 assignments into the same BLKmode object with different values
3888 with no sequence point. Will other languages need this to
3889 be more thorough?
3890
3891 If WANT_VALUE is 0, we return NULL, to make sure
3892 to catch quickly any cases where the caller uses the value
3893 and fails to set WANT_VALUE. */
3894
3895 rtx
3896 store_expr (exp, target, want_value)
3897 tree exp;
3898 rtx target;
3899 int want_value;
3900 {
3901 rtx temp;
3902 int dont_return_target = 0;
3903 int dont_store_target = 0;
3904
3905 if (TREE_CODE (exp) == COMPOUND_EXPR)
3906 {
3907 /* Perform first part of compound expression, then assign from second
3908 part. */
3909 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3910 emit_queue ();
3911 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3912 }
3913 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3914 {
3915 /* For conditional expression, get safe form of the target. Then
3916 test the condition, doing the appropriate assignment on either
3917 side. This avoids the creation of unnecessary temporaries.
3918 For non-BLKmode, it is more efficient not to do this. */
3919
3920 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3921
3922 emit_queue ();
3923 target = protect_from_queue (target, 1);
3924
3925 do_pending_stack_adjust ();
3926 NO_DEFER_POP;
3927 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3928 start_cleanup_deferral ();
3929 store_expr (TREE_OPERAND (exp, 1), target, 0);
3930 end_cleanup_deferral ();
3931 emit_queue ();
3932 emit_jump_insn (gen_jump (lab2));
3933 emit_barrier ();
3934 emit_label (lab1);
3935 start_cleanup_deferral ();
3936 store_expr (TREE_OPERAND (exp, 2), target, 0);
3937 end_cleanup_deferral ();
3938 emit_queue ();
3939 emit_label (lab2);
3940 OK_DEFER_POP;
3941
3942 return want_value ? target : NULL_RTX;
3943 }
3944 else if (queued_subexp_p (target))
3945 /* If target contains a postincrement, let's not risk
3946 using it as the place to generate the rhs. */
3947 {
3948 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3949 {
3950 /* Expand EXP into a new pseudo. */
3951 temp = gen_reg_rtx (GET_MODE (target));
3952 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3953 }
3954 else
3955 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3956
3957 /* If target is volatile, ANSI requires accessing the value
3958 *from* the target, if it is accessed. So make that happen.
3959 In no case return the target itself. */
3960 if (! MEM_VOLATILE_P (target) && want_value)
3961 dont_return_target = 1;
3962 }
3963 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3964 && GET_MODE (target) != BLKmode)
3965 /* If target is in memory and caller wants value in a register instead,
3966 arrange that. Pass TARGET as target for expand_expr so that,
3967 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3968 We know expand_expr will not use the target in that case.
3969 Don't do this if TARGET is volatile because we are supposed
3970 to write it and then read it. */
3971 {
3972 temp = expand_expr (exp, target, GET_MODE (target), 0);
3973 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3974 {
3975 /* If TEMP is already in the desired TARGET, only copy it from
3976 memory and don't store it there again. */
3977 if (temp == target
3978 || (rtx_equal_p (temp, target)
3979 && ! side_effects_p (temp) && ! side_effects_p (target)))
3980 dont_store_target = 1;
3981 temp = copy_to_reg (temp);
3982 }
3983 dont_return_target = 1;
3984 }
3985 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3986 /* If this is an scalar in a register that is stored in a wider mode
3987 than the declared mode, compute the result into its declared mode
3988 and then convert to the wider mode. Our value is the computed
3989 expression. */
3990 {
3991 /* If we don't want a value, we can do the conversion inside EXP,
3992 which will often result in some optimizations. Do the conversion
3993 in two steps: first change the signedness, if needed, then
3994 the extend. But don't do this if the type of EXP is a subtype
3995 of something else since then the conversion might involve
3996 more than just converting modes. */
3997 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3998 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3999 {
4000 if (TREE_UNSIGNED (TREE_TYPE (exp))
4001 != SUBREG_PROMOTED_UNSIGNED_P (target))
4002 exp
4003 = convert
4004 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4005 TREE_TYPE (exp)),
4006 exp);
4007
4008 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4009 SUBREG_PROMOTED_UNSIGNED_P (target)),
4010 exp);
4011 }
4012
4013 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4014
4015 /* If TEMP is a volatile MEM and we want a result value, make
4016 the access now so it gets done only once. Likewise if
4017 it contains TARGET. */
4018 if (GET_CODE (temp) == MEM && want_value
4019 && (MEM_VOLATILE_P (temp)
4020 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4021 temp = copy_to_reg (temp);
4022
4023 /* If TEMP is a VOIDmode constant, use convert_modes to make
4024 sure that we properly convert it. */
4025 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4026 {
4027 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4028 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4029 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4030 GET_MODE (target), temp,
4031 SUBREG_PROMOTED_UNSIGNED_P (target));
4032 }
4033
4034 convert_move (SUBREG_REG (target), temp,
4035 SUBREG_PROMOTED_UNSIGNED_P (target));
4036
4037 /* If we promoted a constant, change the mode back down to match
4038 target. Otherwise, the caller might get confused by a result whose
4039 mode is larger than expected. */
4040
4041 if (want_value && GET_MODE (temp) != GET_MODE (target)
4042 && GET_MODE (temp) != VOIDmode)
4043 {
4044 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4045 SUBREG_PROMOTED_VAR_P (temp) = 1;
4046 SUBREG_PROMOTED_UNSIGNED_P (temp)
4047 = SUBREG_PROMOTED_UNSIGNED_P (target);
4048 }
4049
4050 return want_value ? temp : NULL_RTX;
4051 }
4052 else
4053 {
4054 temp = expand_expr (exp, target, GET_MODE (target), 0);
4055 /* Return TARGET if it's a specified hardware register.
4056 If TARGET is a volatile mem ref, either return TARGET
4057 or return a reg copied *from* TARGET; ANSI requires this.
4058
4059 Otherwise, if TEMP is not TARGET, return TEMP
4060 if it is constant (for efficiency),
4061 or if we really want the correct value. */
4062 if (!(target && GET_CODE (target) == REG
4063 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4064 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4065 && ! rtx_equal_p (temp, target)
4066 && (CONSTANT_P (temp) || want_value))
4067 dont_return_target = 1;
4068 }
4069
4070 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4071 the same as that of TARGET, adjust the constant. This is needed, for
4072 example, in case it is a CONST_DOUBLE and we want only a word-sized
4073 value. */
4074 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4075 && TREE_CODE (exp) != ERROR_MARK
4076 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4077 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4078 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4079
4080 /* If value was not generated in the target, store it there.
4081 Convert the value to TARGET's type first if necessary.
4082 If TEMP and TARGET compare equal according to rtx_equal_p, but
4083 one or both of them are volatile memory refs, we have to distinguish
4084 two cases:
4085 - expand_expr has used TARGET. In this case, we must not generate
4086 another copy. This can be detected by TARGET being equal according
4087 to == .
4088 - expand_expr has not used TARGET - that means that the source just
4089 happens to have the same RTX form. Since temp will have been created
4090 by expand_expr, it will compare unequal according to == .
4091 We must generate a copy in this case, to reach the correct number
4092 of volatile memory references. */
4093
4094 if ((! rtx_equal_p (temp, target)
4095 || (temp != target && (side_effects_p (temp)
4096 || side_effects_p (target))))
4097 && TREE_CODE (exp) != ERROR_MARK
4098 && ! dont_store_target)
4099 {
4100 target = protect_from_queue (target, 1);
4101 if (GET_MODE (temp) != GET_MODE (target)
4102 && GET_MODE (temp) != VOIDmode)
4103 {
4104 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4105 if (dont_return_target)
4106 {
4107 /* In this case, we will return TEMP,
4108 so make sure it has the proper mode.
4109 But don't forget to store the value into TARGET. */
4110 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4111 emit_move_insn (target, temp);
4112 }
4113 else
4114 convert_move (target, temp, unsignedp);
4115 }
4116
4117 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4118 {
4119 /* Handle copying a string constant into an array. The string
4120 constant may be shorter than the array. So copy just the string's
4121 actual length, and clear the rest. First get the size of the data
4122 type of the string, which is actually the size of the target. */
4123 rtx size = expr_size (exp);
4124
4125 if (GET_CODE (size) == CONST_INT
4126 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4127 emit_block_move (target, temp, size);
4128 else
4129 {
4130 /* Compute the size of the data to copy from the string. */
4131 tree copy_size
4132 = size_binop (MIN_EXPR,
4133 make_tree (sizetype, size),
4134 size_int (TREE_STRING_LENGTH (exp)));
4135 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4136 VOIDmode, 0);
4137 rtx label = 0;
4138
4139 /* Copy that much. */
4140 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4141 emit_block_move (target, temp, copy_size_rtx);
4142
4143 /* Figure out how much is left in TARGET that we have to clear.
4144 Do all calculations in ptr_mode. */
4145 if (GET_CODE (copy_size_rtx) == CONST_INT)
4146 {
4147 size = plus_constant (size, -INTVAL (copy_size_rtx));
4148 target = adjust_address (target, BLKmode,
4149 INTVAL (copy_size_rtx));
4150 }
4151 else
4152 {
4153 size = expand_binop (ptr_mode, sub_optab, size,
4154 copy_size_rtx, NULL_RTX, 0,
4155 OPTAB_LIB_WIDEN);
4156
4157 #ifdef POINTERS_EXTEND_UNSIGNED
4158 if (GET_MODE (copy_size_rtx) != Pmode)
4159 copy_size_rtx = convert_memory_address (Pmode,
4160 copy_size_rtx);
4161 #endif
4162
4163 target = offset_address (target, copy_size_rtx,
4164 highest_pow2_factor (copy_size));
4165 label = gen_label_rtx ();
4166 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4167 GET_MODE (size), 0, label);
4168 }
4169
4170 if (size != const0_rtx)
4171 clear_storage (target, size);
4172
4173 if (label)
4174 emit_label (label);
4175 }
4176 }
4177 /* Handle calls that return values in multiple non-contiguous locations.
4178 The Irix 6 ABI has examples of this. */
4179 else if (GET_CODE (target) == PARALLEL)
4180 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4181 else if (GET_MODE (temp) == BLKmode)
4182 emit_block_move (target, temp, expr_size (exp));
4183 else
4184 emit_move_insn (target, temp);
4185 }
4186
4187 /* If we don't want a value, return NULL_RTX. */
4188 if (! want_value)
4189 return NULL_RTX;
4190
4191 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4192 ??? The latter test doesn't seem to make sense. */
4193 else if (dont_return_target && GET_CODE (temp) != MEM)
4194 return temp;
4195
4196 /* Return TARGET itself if it is a hard register. */
4197 else if (want_value && GET_MODE (target) != BLKmode
4198 && ! (GET_CODE (target) == REG
4199 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4200 return copy_to_reg (target);
4201
4202 else
4203 return target;
4204 }
4205 \f
4206 /* Return 1 if EXP just contains zeros. */
4207
4208 static int
4209 is_zeros_p (exp)
4210 tree exp;
4211 {
4212 tree elt;
4213
4214 switch (TREE_CODE (exp))
4215 {
4216 case CONVERT_EXPR:
4217 case NOP_EXPR:
4218 case NON_LVALUE_EXPR:
4219 case VIEW_CONVERT_EXPR:
4220 return is_zeros_p (TREE_OPERAND (exp, 0));
4221
4222 case INTEGER_CST:
4223 return integer_zerop (exp);
4224
4225 case COMPLEX_CST:
4226 return
4227 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4228
4229 case REAL_CST:
4230 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4231
4232 case CONSTRUCTOR:
4233 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4234 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4235 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4236 if (! is_zeros_p (TREE_VALUE (elt)))
4237 return 0;
4238
4239 return 1;
4240
4241 default:
4242 return 0;
4243 }
4244 }
4245
4246 /* Return 1 if EXP contains mostly (3/4) zeros. */
4247
4248 static int
4249 mostly_zeros_p (exp)
4250 tree exp;
4251 {
4252 if (TREE_CODE (exp) == CONSTRUCTOR)
4253 {
4254 int elts = 0, zeros = 0;
4255 tree elt = CONSTRUCTOR_ELTS (exp);
4256 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4257 {
4258 /* If there are no ranges of true bits, it is all zero. */
4259 return elt == NULL_TREE;
4260 }
4261 for (; elt; elt = TREE_CHAIN (elt))
4262 {
4263 /* We do not handle the case where the index is a RANGE_EXPR,
4264 so the statistic will be somewhat inaccurate.
4265 We do make a more accurate count in store_constructor itself,
4266 so since this function is only used for nested array elements,
4267 this should be close enough. */
4268 if (mostly_zeros_p (TREE_VALUE (elt)))
4269 zeros++;
4270 elts++;
4271 }
4272
4273 return 4 * zeros >= 3 * elts;
4274 }
4275
4276 return is_zeros_p (exp);
4277 }
4278 \f
4279 /* Helper function for store_constructor.
4280 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4281 TYPE is the type of the CONSTRUCTOR, not the element type.
4282 CLEARED is as for store_constructor.
4283 ALIAS_SET is the alias set to use for any stores.
4284
4285 This provides a recursive shortcut back to store_constructor when it isn't
4286 necessary to go through store_field. This is so that we can pass through
4287 the cleared field to let store_constructor know that we may not have to
4288 clear a substructure if the outer structure has already been cleared. */
4289
4290 static void
4291 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4292 alias_set)
4293 rtx target;
4294 unsigned HOST_WIDE_INT bitsize;
4295 HOST_WIDE_INT bitpos;
4296 enum machine_mode mode;
4297 tree exp, type;
4298 int cleared;
4299 int alias_set;
4300 {
4301 if (TREE_CODE (exp) == CONSTRUCTOR
4302 && bitpos % BITS_PER_UNIT == 0
4303 /* If we have a non-zero bitpos for a register target, then we just
4304 let store_field do the bitfield handling. This is unlikely to
4305 generate unnecessary clear instructions anyways. */
4306 && (bitpos == 0 || GET_CODE (target) == MEM))
4307 {
4308 if (GET_CODE (target) == MEM)
4309 target
4310 = adjust_address (target,
4311 GET_MODE (target) == BLKmode
4312 || 0 != (bitpos
4313 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4314 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4315
4316
4317 /* Update the alias set, if required. */
4318 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4319 && MEM_ALIAS_SET (target) != 0)
4320 {
4321 target = copy_rtx (target);
4322 set_mem_alias_set (target, alias_set);
4323 }
4324
4325 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4326 }
4327 else
4328 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4329 alias_set);
4330 }
4331
4332 /* Store the value of constructor EXP into the rtx TARGET.
4333 TARGET is either a REG or a MEM; we know it cannot conflict, since
4334 safe_from_p has been called.
4335 CLEARED is true if TARGET is known to have been zero'd.
4336 SIZE is the number of bytes of TARGET we are allowed to modify: this
4337 may not be the same as the size of EXP if we are assigning to a field
4338 which has been packed to exclude padding bits. */
4339
4340 static void
4341 store_constructor (exp, target, cleared, size)
4342 tree exp;
4343 rtx target;
4344 int cleared;
4345 HOST_WIDE_INT size;
4346 {
4347 tree type = TREE_TYPE (exp);
4348 #ifdef WORD_REGISTER_OPERATIONS
4349 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4350 #endif
4351
4352 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4353 || TREE_CODE (type) == QUAL_UNION_TYPE)
4354 {
4355 tree elt;
4356
4357 /* We either clear the aggregate or indicate the value is dead. */
4358 if ((TREE_CODE (type) == UNION_TYPE
4359 || TREE_CODE (type) == QUAL_UNION_TYPE)
4360 && ! cleared
4361 && ! CONSTRUCTOR_ELTS (exp))
4362 /* If the constructor is empty, clear the union. */
4363 {
4364 clear_storage (target, expr_size (exp));
4365 cleared = 1;
4366 }
4367
4368 /* If we are building a static constructor into a register,
4369 set the initial value as zero so we can fold the value into
4370 a constant. But if more than one register is involved,
4371 this probably loses. */
4372 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4373 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4374 {
4375 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4376 cleared = 1;
4377 }
4378
4379 /* If the constructor has fewer fields than the structure
4380 or if we are initializing the structure to mostly zeros,
4381 clear the whole structure first. Don't do this if TARGET is a
4382 register whose mode size isn't equal to SIZE since clear_storage
4383 can't handle this case. */
4384 else if (! cleared && size > 0
4385 && ((list_length (CONSTRUCTOR_ELTS (exp))
4386 != fields_length (type))
4387 || mostly_zeros_p (exp))
4388 && (GET_CODE (target) != REG
4389 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4390 == size)))
4391 {
4392 clear_storage (target, GEN_INT (size));
4393 cleared = 1;
4394 }
4395
4396 if (! cleared)
4397 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4398
4399 /* Store each element of the constructor into
4400 the corresponding field of TARGET. */
4401
4402 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4403 {
4404 tree field = TREE_PURPOSE (elt);
4405 tree value = TREE_VALUE (elt);
4406 enum machine_mode mode;
4407 HOST_WIDE_INT bitsize;
4408 HOST_WIDE_INT bitpos = 0;
4409 int unsignedp;
4410 tree offset;
4411 rtx to_rtx = target;
4412
4413 /* Just ignore missing fields.
4414 We cleared the whole structure, above,
4415 if any fields are missing. */
4416 if (field == 0)
4417 continue;
4418
4419 if (cleared && is_zeros_p (value))
4420 continue;
4421
4422 if (host_integerp (DECL_SIZE (field), 1))
4423 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4424 else
4425 bitsize = -1;
4426
4427 unsignedp = TREE_UNSIGNED (field);
4428 mode = DECL_MODE (field);
4429 if (DECL_BIT_FIELD (field))
4430 mode = VOIDmode;
4431
4432 offset = DECL_FIELD_OFFSET (field);
4433 if (host_integerp (offset, 0)
4434 && host_integerp (bit_position (field), 0))
4435 {
4436 bitpos = int_bit_position (field);
4437 offset = 0;
4438 }
4439 else
4440 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4441
4442 if (offset)
4443 {
4444 rtx offset_rtx;
4445
4446 if (contains_placeholder_p (offset))
4447 offset = build (WITH_RECORD_EXPR, sizetype,
4448 offset, make_tree (TREE_TYPE (exp), target));
4449
4450 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4451 if (GET_CODE (to_rtx) != MEM)
4452 abort ();
4453
4454 if (GET_MODE (offset_rtx) != ptr_mode)
4455 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4456
4457 #ifdef POINTERS_EXTEND_UNSIGNED
4458 if (GET_MODE (offset_rtx) != Pmode)
4459 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4460 #endif
4461
4462 to_rtx = offset_address (to_rtx, offset_rtx,
4463 highest_pow2_factor (offset));
4464 }
4465
4466 if (TREE_READONLY (field))
4467 {
4468 if (GET_CODE (to_rtx) == MEM)
4469 to_rtx = copy_rtx (to_rtx);
4470
4471 RTX_UNCHANGING_P (to_rtx) = 1;
4472 }
4473
4474 #ifdef WORD_REGISTER_OPERATIONS
4475 /* If this initializes a field that is smaller than a word, at the
4476 start of a word, try to widen it to a full word.
4477 This special case allows us to output C++ member function
4478 initializations in a form that the optimizers can understand. */
4479 if (GET_CODE (target) == REG
4480 && bitsize < BITS_PER_WORD
4481 && bitpos % BITS_PER_WORD == 0
4482 && GET_MODE_CLASS (mode) == MODE_INT
4483 && TREE_CODE (value) == INTEGER_CST
4484 && exp_size >= 0
4485 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4486 {
4487 tree type = TREE_TYPE (value);
4488
4489 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4490 {
4491 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4492 value = convert (type, value);
4493 }
4494
4495 if (BYTES_BIG_ENDIAN)
4496 value
4497 = fold (build (LSHIFT_EXPR, type, value,
4498 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4499 bitsize = BITS_PER_WORD;
4500 mode = word_mode;
4501 }
4502 #endif
4503
4504 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4505 && DECL_NONADDRESSABLE_P (field))
4506 {
4507 to_rtx = copy_rtx (to_rtx);
4508 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4509 }
4510
4511 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4512 value, type, cleared,
4513 get_alias_set (TREE_TYPE (field)));
4514 }
4515 }
4516 else if (TREE_CODE (type) == ARRAY_TYPE)
4517 {
4518 tree elt;
4519 int i;
4520 int need_to_clear;
4521 tree domain = TYPE_DOMAIN (type);
4522 tree elttype = TREE_TYPE (type);
4523 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4524 && TYPE_MAX_VALUE (domain)
4525 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4526 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4527 HOST_WIDE_INT minelt = 0;
4528 HOST_WIDE_INT maxelt = 0;
4529
4530 /* If we have constant bounds for the range of the type, get them. */
4531 if (const_bounds_p)
4532 {
4533 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4534 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4535 }
4536
4537 /* If the constructor has fewer elements than the array,
4538 clear the whole array first. Similarly if this is
4539 static constructor of a non-BLKmode object. */
4540 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4541 need_to_clear = 1;
4542 else
4543 {
4544 HOST_WIDE_INT count = 0, zero_count = 0;
4545 need_to_clear = ! const_bounds_p;
4546
4547 /* This loop is a more accurate version of the loop in
4548 mostly_zeros_p (it handles RANGE_EXPR in an index).
4549 It is also needed to check for missing elements. */
4550 for (elt = CONSTRUCTOR_ELTS (exp);
4551 elt != NULL_TREE && ! need_to_clear;
4552 elt = TREE_CHAIN (elt))
4553 {
4554 tree index = TREE_PURPOSE (elt);
4555 HOST_WIDE_INT this_node_count;
4556
4557 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4558 {
4559 tree lo_index = TREE_OPERAND (index, 0);
4560 tree hi_index = TREE_OPERAND (index, 1);
4561
4562 if (! host_integerp (lo_index, 1)
4563 || ! host_integerp (hi_index, 1))
4564 {
4565 need_to_clear = 1;
4566 break;
4567 }
4568
4569 this_node_count = (tree_low_cst (hi_index, 1)
4570 - tree_low_cst (lo_index, 1) + 1);
4571 }
4572 else
4573 this_node_count = 1;
4574
4575 count += this_node_count;
4576 if (mostly_zeros_p (TREE_VALUE (elt)))
4577 zero_count += this_node_count;
4578 }
4579
4580 /* Clear the entire array first if there are any missing elements,
4581 or if the incidence of zero elements is >= 75%. */
4582 if (! need_to_clear
4583 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4584 need_to_clear = 1;
4585 }
4586
4587 if (need_to_clear && size > 0)
4588 {
4589 if (! cleared)
4590 clear_storage (target, GEN_INT (size));
4591 cleared = 1;
4592 }
4593 else if (REG_P (target))
4594 /* Inform later passes that the old value is dead. */
4595 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4596
4597 /* Store each element of the constructor into
4598 the corresponding element of TARGET, determined
4599 by counting the elements. */
4600 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4601 elt;
4602 elt = TREE_CHAIN (elt), i++)
4603 {
4604 enum machine_mode mode;
4605 HOST_WIDE_INT bitsize;
4606 HOST_WIDE_INT bitpos;
4607 int unsignedp;
4608 tree value = TREE_VALUE (elt);
4609 tree index = TREE_PURPOSE (elt);
4610 rtx xtarget = target;
4611
4612 if (cleared && is_zeros_p (value))
4613 continue;
4614
4615 unsignedp = TREE_UNSIGNED (elttype);
4616 mode = TYPE_MODE (elttype);
4617 if (mode == BLKmode)
4618 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4619 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4620 : -1);
4621 else
4622 bitsize = GET_MODE_BITSIZE (mode);
4623
4624 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4625 {
4626 tree lo_index = TREE_OPERAND (index, 0);
4627 tree hi_index = TREE_OPERAND (index, 1);
4628 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4629 struct nesting *loop;
4630 HOST_WIDE_INT lo, hi, count;
4631 tree position;
4632
4633 /* If the range is constant and "small", unroll the loop. */
4634 if (const_bounds_p
4635 && host_integerp (lo_index, 0)
4636 && host_integerp (hi_index, 0)
4637 && (lo = tree_low_cst (lo_index, 0),
4638 hi = tree_low_cst (hi_index, 0),
4639 count = hi - lo + 1,
4640 (GET_CODE (target) != MEM
4641 || count <= 2
4642 || (host_integerp (TYPE_SIZE (elttype), 1)
4643 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4644 <= 40 * 8)))))
4645 {
4646 lo -= minelt; hi -= minelt;
4647 for (; lo <= hi; lo++)
4648 {
4649 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4650
4651 if (GET_CODE (target) == MEM
4652 && !MEM_KEEP_ALIAS_SET_P (target)
4653 && TYPE_NONALIASED_COMPONENT (type))
4654 {
4655 target = copy_rtx (target);
4656 MEM_KEEP_ALIAS_SET_P (target) = 1;
4657 }
4658
4659 store_constructor_field
4660 (target, bitsize, bitpos, mode, value, type, cleared,
4661 get_alias_set (elttype));
4662 }
4663 }
4664 else
4665 {
4666 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4667 loop_top = gen_label_rtx ();
4668 loop_end = gen_label_rtx ();
4669
4670 unsignedp = TREE_UNSIGNED (domain);
4671
4672 index = build_decl (VAR_DECL, NULL_TREE, domain);
4673
4674 index_r
4675 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4676 &unsignedp, 0));
4677 SET_DECL_RTL (index, index_r);
4678 if (TREE_CODE (value) == SAVE_EXPR
4679 && SAVE_EXPR_RTL (value) == 0)
4680 {
4681 /* Make sure value gets expanded once before the
4682 loop. */
4683 expand_expr (value, const0_rtx, VOIDmode, 0);
4684 emit_queue ();
4685 }
4686 store_expr (lo_index, index_r, 0);
4687 loop = expand_start_loop (0);
4688
4689 /* Assign value to element index. */
4690 position
4691 = convert (ssizetype,
4692 fold (build (MINUS_EXPR, TREE_TYPE (index),
4693 index, TYPE_MIN_VALUE (domain))));
4694 position = size_binop (MULT_EXPR, position,
4695 convert (ssizetype,
4696 TYPE_SIZE_UNIT (elttype)));
4697
4698 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4699 xtarget = offset_address (target, pos_rtx,
4700 highest_pow2_factor (position));
4701 xtarget = adjust_address (xtarget, mode, 0);
4702 if (TREE_CODE (value) == CONSTRUCTOR)
4703 store_constructor (value, xtarget, cleared,
4704 bitsize / BITS_PER_UNIT);
4705 else
4706 store_expr (value, xtarget, 0);
4707
4708 expand_exit_loop_if_false (loop,
4709 build (LT_EXPR, integer_type_node,
4710 index, hi_index));
4711
4712 expand_increment (build (PREINCREMENT_EXPR,
4713 TREE_TYPE (index),
4714 index, integer_one_node), 0, 0);
4715 expand_end_loop ();
4716 emit_label (loop_end);
4717 }
4718 }
4719 else if ((index != 0 && ! host_integerp (index, 0))
4720 || ! host_integerp (TYPE_SIZE (elttype), 1))
4721 {
4722 tree position;
4723
4724 if (index == 0)
4725 index = ssize_int (1);
4726
4727 if (minelt)
4728 index = convert (ssizetype,
4729 fold (build (MINUS_EXPR, index,
4730 TYPE_MIN_VALUE (domain))));
4731
4732 position = size_binop (MULT_EXPR, index,
4733 convert (ssizetype,
4734 TYPE_SIZE_UNIT (elttype)));
4735 xtarget = offset_address (target,
4736 expand_expr (position, 0, VOIDmode, 0),
4737 highest_pow2_factor (position));
4738 xtarget = adjust_address (xtarget, mode, 0);
4739 store_expr (value, xtarget, 0);
4740 }
4741 else
4742 {
4743 if (index != 0)
4744 bitpos = ((tree_low_cst (index, 0) - minelt)
4745 * tree_low_cst (TYPE_SIZE (elttype), 1));
4746 else
4747 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4748
4749 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4750 && TYPE_NONALIASED_COMPONENT (type))
4751 {
4752 target = copy_rtx (target);
4753 MEM_KEEP_ALIAS_SET_P (target) = 1;
4754 }
4755
4756 store_constructor_field (target, bitsize, bitpos, mode, value,
4757 type, cleared, get_alias_set (elttype));
4758
4759 }
4760 }
4761 }
4762
4763 /* Set constructor assignments. */
4764 else if (TREE_CODE (type) == SET_TYPE)
4765 {
4766 tree elt = CONSTRUCTOR_ELTS (exp);
4767 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4768 tree domain = TYPE_DOMAIN (type);
4769 tree domain_min, domain_max, bitlength;
4770
4771 /* The default implementation strategy is to extract the constant
4772 parts of the constructor, use that to initialize the target,
4773 and then "or" in whatever non-constant ranges we need in addition.
4774
4775 If a large set is all zero or all ones, it is
4776 probably better to set it using memset (if available) or bzero.
4777 Also, if a large set has just a single range, it may also be
4778 better to first clear all the first clear the set (using
4779 bzero/memset), and set the bits we want. */
4780
4781 /* Check for all zeros. */
4782 if (elt == NULL_TREE && size > 0)
4783 {
4784 if (!cleared)
4785 clear_storage (target, GEN_INT (size));
4786 return;
4787 }
4788
4789 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4790 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4791 bitlength = size_binop (PLUS_EXPR,
4792 size_diffop (domain_max, domain_min),
4793 ssize_int (1));
4794
4795 nbits = tree_low_cst (bitlength, 1);
4796
4797 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4798 are "complicated" (more than one range), initialize (the
4799 constant parts) by copying from a constant. */
4800 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4801 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4802 {
4803 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4804 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4805 char *bit_buffer = (char *) alloca (nbits);
4806 HOST_WIDE_INT word = 0;
4807 unsigned int bit_pos = 0;
4808 unsigned int ibit = 0;
4809 unsigned int offset = 0; /* In bytes from beginning of set. */
4810
4811 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4812 for (;;)
4813 {
4814 if (bit_buffer[ibit])
4815 {
4816 if (BYTES_BIG_ENDIAN)
4817 word |= (1 << (set_word_size - 1 - bit_pos));
4818 else
4819 word |= 1 << bit_pos;
4820 }
4821
4822 bit_pos++; ibit++;
4823 if (bit_pos >= set_word_size || ibit == nbits)
4824 {
4825 if (word != 0 || ! cleared)
4826 {
4827 rtx datum = GEN_INT (word);
4828 rtx to_rtx;
4829
4830 /* The assumption here is that it is safe to use
4831 XEXP if the set is multi-word, but not if
4832 it's single-word. */
4833 if (GET_CODE (target) == MEM)
4834 to_rtx = adjust_address (target, mode, offset);
4835 else if (offset == 0)
4836 to_rtx = target;
4837 else
4838 abort ();
4839 emit_move_insn (to_rtx, datum);
4840 }
4841
4842 if (ibit == nbits)
4843 break;
4844 word = 0;
4845 bit_pos = 0;
4846 offset += set_word_size / BITS_PER_UNIT;
4847 }
4848 }
4849 }
4850 else if (!cleared)
4851 /* Don't bother clearing storage if the set is all ones. */
4852 if (TREE_CHAIN (elt) != NULL_TREE
4853 || (TREE_PURPOSE (elt) == NULL_TREE
4854 ? nbits != 1
4855 : ( ! host_integerp (TREE_VALUE (elt), 0)
4856 || ! host_integerp (TREE_PURPOSE (elt), 0)
4857 || (tree_low_cst (TREE_VALUE (elt), 0)
4858 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4859 != (HOST_WIDE_INT) nbits))))
4860 clear_storage (target, expr_size (exp));
4861
4862 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4863 {
4864 /* Start of range of element or NULL. */
4865 tree startbit = TREE_PURPOSE (elt);
4866 /* End of range of element, or element value. */
4867 tree endbit = TREE_VALUE (elt);
4868 #ifdef TARGET_MEM_FUNCTIONS
4869 HOST_WIDE_INT startb, endb;
4870 #endif
4871 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4872
4873 bitlength_rtx = expand_expr (bitlength,
4874 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4875
4876 /* Handle non-range tuple element like [ expr ]. */
4877 if (startbit == NULL_TREE)
4878 {
4879 startbit = save_expr (endbit);
4880 endbit = startbit;
4881 }
4882
4883 startbit = convert (sizetype, startbit);
4884 endbit = convert (sizetype, endbit);
4885 if (! integer_zerop (domain_min))
4886 {
4887 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4888 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4889 }
4890 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4891 EXPAND_CONST_ADDRESS);
4892 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4893 EXPAND_CONST_ADDRESS);
4894
4895 if (REG_P (target))
4896 {
4897 targetx
4898 = assign_temp
4899 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4900 TYPE_QUAL_CONST)),
4901 0, 1, 1);
4902 emit_move_insn (targetx, target);
4903 }
4904
4905 else if (GET_CODE (target) == MEM)
4906 targetx = target;
4907 else
4908 abort ();
4909
4910 #ifdef TARGET_MEM_FUNCTIONS
4911 /* Optimization: If startbit and endbit are
4912 constants divisible by BITS_PER_UNIT,
4913 call memset instead. */
4914 if (TREE_CODE (startbit) == INTEGER_CST
4915 && TREE_CODE (endbit) == INTEGER_CST
4916 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4917 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4918 {
4919 emit_library_call (memset_libfunc, LCT_NORMAL,
4920 VOIDmode, 3,
4921 plus_constant (XEXP (targetx, 0),
4922 startb / BITS_PER_UNIT),
4923 Pmode,
4924 constm1_rtx, TYPE_MODE (integer_type_node),
4925 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4926 TYPE_MODE (sizetype));
4927 }
4928 else
4929 #endif
4930 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4931 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4932 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4933 startbit_rtx, TYPE_MODE (sizetype),
4934 endbit_rtx, TYPE_MODE (sizetype));
4935
4936 if (REG_P (target))
4937 emit_move_insn (target, targetx);
4938 }
4939 }
4940
4941 else
4942 abort ();
4943 }
4944
4945 /* Store the value of EXP (an expression tree)
4946 into a subfield of TARGET which has mode MODE and occupies
4947 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4948 If MODE is VOIDmode, it means that we are storing into a bit-field.
4949
4950 If VALUE_MODE is VOIDmode, return nothing in particular.
4951 UNSIGNEDP is not used in this case.
4952
4953 Otherwise, return an rtx for the value stored. This rtx
4954 has mode VALUE_MODE if that is convenient to do.
4955 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4956
4957 TYPE is the type of the underlying object,
4958
4959 ALIAS_SET is the alias set for the destination. This value will
4960 (in general) be different from that for TARGET, since TARGET is a
4961 reference to the containing structure. */
4962
4963 static rtx
4964 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
4965 alias_set)
4966 rtx target;
4967 HOST_WIDE_INT bitsize;
4968 HOST_WIDE_INT bitpos;
4969 enum machine_mode mode;
4970 tree exp;
4971 enum machine_mode value_mode;
4972 int unsignedp;
4973 tree type;
4974 int alias_set;
4975 {
4976 HOST_WIDE_INT width_mask = 0;
4977
4978 if (TREE_CODE (exp) == ERROR_MARK)
4979 return const0_rtx;
4980
4981 /* If we have nothing to store, do nothing unless the expression has
4982 side-effects. */
4983 if (bitsize == 0)
4984 return expand_expr (exp, const0_rtx, VOIDmode, 0);
4985 else if (bitsize >=0 && bitsize < HOST_BITS_PER_WIDE_INT)
4986 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4987
4988 /* If we are storing into an unaligned field of an aligned union that is
4989 in a register, we may have the mode of TARGET being an integer mode but
4990 MODE == BLKmode. In that case, get an aligned object whose size and
4991 alignment are the same as TARGET and store TARGET into it (we can avoid
4992 the store if the field being stored is the entire width of TARGET). Then
4993 call ourselves recursively to store the field into a BLKmode version of
4994 that object. Finally, load from the object into TARGET. This is not
4995 very efficient in general, but should only be slightly more expensive
4996 than the otherwise-required unaligned accesses. Perhaps this can be
4997 cleaned up later. */
4998
4999 if (mode == BLKmode
5000 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5001 {
5002 rtx object
5003 = assign_temp
5004 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5005 0, 1, 1);
5006 rtx blk_object = copy_rtx (object);
5007
5008 PUT_MODE (blk_object, BLKmode);
5009
5010 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5011 emit_move_insn (object, target);
5012
5013 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5014 alias_set);
5015
5016 emit_move_insn (target, object);
5017
5018 /* We want to return the BLKmode version of the data. */
5019 return blk_object;
5020 }
5021
5022 if (GET_CODE (target) == CONCAT)
5023 {
5024 /* We're storing into a struct containing a single __complex. */
5025
5026 if (bitpos != 0)
5027 abort ();
5028 return store_expr (exp, target, 0);
5029 }
5030
5031 /* If the structure is in a register or if the component
5032 is a bit field, we cannot use addressing to access it.
5033 Use bit-field techniques or SUBREG to store in it. */
5034
5035 if (mode == VOIDmode
5036 || (mode != BLKmode && ! direct_store[(int) mode]
5037 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5038 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5039 || GET_CODE (target) == REG
5040 || GET_CODE (target) == SUBREG
5041 /* If the field isn't aligned enough to store as an ordinary memref,
5042 store it as a bit field. */
5043 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5044 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5045 || bitpos % GET_MODE_ALIGNMENT (mode)))
5046 /* If the RHS and field are a constant size and the size of the
5047 RHS isn't the same size as the bitfield, we must use bitfield
5048 operations. */
5049 || (bitsize >= 0
5050 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5051 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5052 {
5053 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5054
5055 /* If BITSIZE is narrower than the size of the type of EXP
5056 we will be narrowing TEMP. Normally, what's wanted are the
5057 low-order bits. However, if EXP's type is a record and this is
5058 big-endian machine, we want the upper BITSIZE bits. */
5059 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5060 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5061 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5062 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5063 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5064 - bitsize),
5065 temp, 1);
5066
5067 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5068 MODE. */
5069 if (mode != VOIDmode && mode != BLKmode
5070 && mode != TYPE_MODE (TREE_TYPE (exp)))
5071 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5072
5073 /* If the modes of TARGET and TEMP are both BLKmode, both
5074 must be in memory and BITPOS must be aligned on a byte
5075 boundary. If so, we simply do a block copy. */
5076 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5077 {
5078 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5079 || bitpos % BITS_PER_UNIT != 0)
5080 abort ();
5081
5082 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5083 emit_block_move (target, temp,
5084 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5085 / BITS_PER_UNIT));
5086
5087 return value_mode == VOIDmode ? const0_rtx : target;
5088 }
5089
5090 /* Store the value in the bitfield. */
5091 store_bit_field (target, bitsize, bitpos, mode, temp,
5092 int_size_in_bytes (type));
5093
5094 if (value_mode != VOIDmode)
5095 {
5096 /* The caller wants an rtx for the value.
5097 If possible, avoid refetching from the bitfield itself. */
5098 if (width_mask != 0
5099 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5100 {
5101 tree count;
5102 enum machine_mode tmode;
5103
5104 if (unsignedp)
5105 return expand_and (temp,
5106 GEN_INT
5107 (trunc_int_for_mode
5108 (width_mask,
5109 GET_MODE (temp) == VOIDmode
5110 ? value_mode
5111 : GET_MODE (temp))), NULL_RTX);
5112
5113 tmode = GET_MODE (temp);
5114 if (tmode == VOIDmode)
5115 tmode = value_mode;
5116 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5117 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5118 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5119 }
5120
5121 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5122 NULL_RTX, value_mode, VOIDmode,
5123 int_size_in_bytes (type));
5124 }
5125 return const0_rtx;
5126 }
5127 else
5128 {
5129 rtx addr = XEXP (target, 0);
5130 rtx to_rtx = target;
5131
5132 /* If a value is wanted, it must be the lhs;
5133 so make the address stable for multiple use. */
5134
5135 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5136 && ! CONSTANT_ADDRESS_P (addr)
5137 /* A frame-pointer reference is already stable. */
5138 && ! (GET_CODE (addr) == PLUS
5139 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5140 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5141 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5142 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5143
5144 /* Now build a reference to just the desired component. */
5145
5146 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5147
5148 if (to_rtx == target)
5149 to_rtx = copy_rtx (to_rtx);
5150
5151 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5152 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5153 set_mem_alias_set (to_rtx, alias_set);
5154
5155 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5156 }
5157 }
5158 \f
5159 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5160 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5161 codes and find the ultimate containing object, which we return.
5162
5163 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5164 bit position, and *PUNSIGNEDP to the signedness of the field.
5165 If the position of the field is variable, we store a tree
5166 giving the variable offset (in units) in *POFFSET.
5167 This offset is in addition to the bit position.
5168 If the position is not variable, we store 0 in *POFFSET.
5169
5170 If any of the extraction expressions is volatile,
5171 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5172
5173 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5174 is a mode that can be used to access the field. In that case, *PBITSIZE
5175 is redundant.
5176
5177 If the field describes a variable-sized object, *PMODE is set to
5178 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5179 this case, but the address of the object can be found. */
5180
5181 tree
5182 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5183 punsignedp, pvolatilep)
5184 tree exp;
5185 HOST_WIDE_INT *pbitsize;
5186 HOST_WIDE_INT *pbitpos;
5187 tree *poffset;
5188 enum machine_mode *pmode;
5189 int *punsignedp;
5190 int *pvolatilep;
5191 {
5192 tree size_tree = 0;
5193 enum machine_mode mode = VOIDmode;
5194 tree offset = size_zero_node;
5195 tree bit_offset = bitsize_zero_node;
5196 tree placeholder_ptr = 0;
5197 tree tem;
5198
5199 /* First get the mode, signedness, and size. We do this from just the
5200 outermost expression. */
5201 if (TREE_CODE (exp) == COMPONENT_REF)
5202 {
5203 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5204 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5205 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5206
5207 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5208 }
5209 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5210 {
5211 size_tree = TREE_OPERAND (exp, 1);
5212 *punsignedp = TREE_UNSIGNED (exp);
5213 }
5214 else
5215 {
5216 mode = TYPE_MODE (TREE_TYPE (exp));
5217 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5218
5219 if (mode == BLKmode)
5220 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5221 else
5222 *pbitsize = GET_MODE_BITSIZE (mode);
5223 }
5224
5225 if (size_tree != 0)
5226 {
5227 if (! host_integerp (size_tree, 1))
5228 mode = BLKmode, *pbitsize = -1;
5229 else
5230 *pbitsize = tree_low_cst (size_tree, 1);
5231 }
5232
5233 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5234 and find the ultimate containing object. */
5235 while (1)
5236 {
5237 if (TREE_CODE (exp) == BIT_FIELD_REF)
5238 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5239 else if (TREE_CODE (exp) == COMPONENT_REF)
5240 {
5241 tree field = TREE_OPERAND (exp, 1);
5242 tree this_offset = DECL_FIELD_OFFSET (field);
5243
5244 /* If this field hasn't been filled in yet, don't go
5245 past it. This should only happen when folding expressions
5246 made during type construction. */
5247 if (this_offset == 0)
5248 break;
5249 else if (! TREE_CONSTANT (this_offset)
5250 && contains_placeholder_p (this_offset))
5251 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5252
5253 offset = size_binop (PLUS_EXPR, offset, this_offset);
5254 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5255 DECL_FIELD_BIT_OFFSET (field));
5256
5257 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5258 }
5259
5260 else if (TREE_CODE (exp) == ARRAY_REF
5261 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5262 {
5263 tree index = TREE_OPERAND (exp, 1);
5264 tree array = TREE_OPERAND (exp, 0);
5265 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5266 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5267 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5268
5269 /* We assume all arrays have sizes that are a multiple of a byte.
5270 First subtract the lower bound, if any, in the type of the
5271 index, then convert to sizetype and multiply by the size of the
5272 array element. */
5273 if (low_bound != 0 && ! integer_zerop (low_bound))
5274 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5275 index, low_bound));
5276
5277 /* If the index has a self-referential type, pass it to a
5278 WITH_RECORD_EXPR; if the component size is, pass our
5279 component to one. */
5280 if (! TREE_CONSTANT (index)
5281 && contains_placeholder_p (index))
5282 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5283 if (! TREE_CONSTANT (unit_size)
5284 && contains_placeholder_p (unit_size))
5285 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5286
5287 offset = size_binop (PLUS_EXPR, offset,
5288 size_binop (MULT_EXPR,
5289 convert (sizetype, index),
5290 unit_size));
5291 }
5292
5293 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5294 {
5295 tree new = find_placeholder (exp, &placeholder_ptr);
5296
5297 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5298 We might have been called from tree optimization where we
5299 haven't set up an object yet. */
5300 if (new == 0)
5301 break;
5302 else
5303 exp = new;
5304
5305 continue;
5306 }
5307 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5308 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5309 && ! ((TREE_CODE (exp) == NOP_EXPR
5310 || TREE_CODE (exp) == CONVERT_EXPR)
5311 && (TYPE_MODE (TREE_TYPE (exp))
5312 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5313 break;
5314
5315 /* If any reference in the chain is volatile, the effect is volatile. */
5316 if (TREE_THIS_VOLATILE (exp))
5317 *pvolatilep = 1;
5318
5319 exp = TREE_OPERAND (exp, 0);
5320 }
5321
5322 /* If OFFSET is constant, see if we can return the whole thing as a
5323 constant bit position. Otherwise, split it up. */
5324 if (host_integerp (offset, 0)
5325 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5326 bitsize_unit_node))
5327 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5328 && host_integerp (tem, 0))
5329 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5330 else
5331 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5332
5333 *pmode = mode;
5334 return exp;
5335 }
5336
5337 /* Return 1 if T is an expression that get_inner_reference handles. */
5338
5339 int
5340 handled_component_p (t)
5341 tree t;
5342 {
5343 switch (TREE_CODE (t))
5344 {
5345 case BIT_FIELD_REF:
5346 case COMPONENT_REF:
5347 case ARRAY_REF:
5348 case ARRAY_RANGE_REF:
5349 case NON_LVALUE_EXPR:
5350 case VIEW_CONVERT_EXPR:
5351 return 1;
5352
5353 case NOP_EXPR:
5354 case CONVERT_EXPR:
5355 return (TYPE_MODE (TREE_TYPE (t))
5356 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5357
5358 default:
5359 return 0;
5360 }
5361 }
5362 \f
5363 /* Given an rtx VALUE that may contain additions and multiplications, return
5364 an equivalent value that just refers to a register, memory, or constant.
5365 This is done by generating instructions to perform the arithmetic and
5366 returning a pseudo-register containing the value.
5367
5368 The returned value may be a REG, SUBREG, MEM or constant. */
5369
5370 rtx
5371 force_operand (value, target)
5372 rtx value, target;
5373 {
5374 optab binoptab = 0;
5375 /* Use a temporary to force order of execution of calls to
5376 `force_operand'. */
5377 rtx tmp;
5378 rtx op2;
5379 /* Use subtarget as the target for operand 0 of a binary operation. */
5380 rtx subtarget = get_subtarget (target);
5381
5382 /* Check for a PIC address load. */
5383 if (flag_pic
5384 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5385 && XEXP (value, 0) == pic_offset_table_rtx
5386 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5387 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5388 || GET_CODE (XEXP (value, 1)) == CONST))
5389 {
5390 if (!subtarget)
5391 subtarget = gen_reg_rtx (GET_MODE (value));
5392 emit_move_insn (subtarget, value);
5393 return subtarget;
5394 }
5395
5396 if (GET_CODE (value) == PLUS)
5397 binoptab = add_optab;
5398 else if (GET_CODE (value) == MINUS)
5399 binoptab = sub_optab;
5400 else if (GET_CODE (value) == MULT)
5401 {
5402 op2 = XEXP (value, 1);
5403 if (!CONSTANT_P (op2)
5404 && !(GET_CODE (op2) == REG && op2 != subtarget))
5405 subtarget = 0;
5406 tmp = force_operand (XEXP (value, 0), subtarget);
5407 return expand_mult (GET_MODE (value), tmp,
5408 force_operand (op2, NULL_RTX),
5409 target, 1);
5410 }
5411
5412 if (binoptab)
5413 {
5414 op2 = XEXP (value, 1);
5415 if (!CONSTANT_P (op2)
5416 && !(GET_CODE (op2) == REG && op2 != subtarget))
5417 subtarget = 0;
5418 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5419 {
5420 binoptab = add_optab;
5421 op2 = negate_rtx (GET_MODE (value), op2);
5422 }
5423
5424 /* Check for an addition with OP2 a constant integer and our first
5425 operand a PLUS of a virtual register and something else. In that
5426 case, we want to emit the sum of the virtual register and the
5427 constant first and then add the other value. This allows virtual
5428 register instantiation to simply modify the constant rather than
5429 creating another one around this addition. */
5430 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5431 && GET_CODE (XEXP (value, 0)) == PLUS
5432 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5433 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5434 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5435 {
5436 rtx temp = expand_binop (GET_MODE (value), binoptab,
5437 XEXP (XEXP (value, 0), 0), op2,
5438 subtarget, 0, OPTAB_LIB_WIDEN);
5439 return expand_binop (GET_MODE (value), binoptab, temp,
5440 force_operand (XEXP (XEXP (value, 0), 1), 0),
5441 target, 0, OPTAB_LIB_WIDEN);
5442 }
5443
5444 tmp = force_operand (XEXP (value, 0), subtarget);
5445 return expand_binop (GET_MODE (value), binoptab, tmp,
5446 force_operand (op2, NULL_RTX),
5447 target, 0, OPTAB_LIB_WIDEN);
5448 /* We give UNSIGNEDP = 0 to expand_binop
5449 because the only operations we are expanding here are signed ones. */
5450 }
5451
5452 #ifdef INSN_SCHEDULING
5453 /* On machines that have insn scheduling, we want all memory reference to be
5454 explicit, so we need to deal with such paradoxical SUBREGs. */
5455 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5456 && (GET_MODE_SIZE (GET_MODE (value))
5457 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5458 value
5459 = simplify_gen_subreg (GET_MODE (value),
5460 force_reg (GET_MODE (SUBREG_REG (value)),
5461 force_operand (SUBREG_REG (value),
5462 NULL_RTX)),
5463 GET_MODE (SUBREG_REG (value)),
5464 SUBREG_BYTE (value));
5465 #endif
5466
5467 return value;
5468 }
5469 \f
5470 /* Subroutine of expand_expr: return nonzero iff there is no way that
5471 EXP can reference X, which is being modified. TOP_P is nonzero if this
5472 call is going to be used to determine whether we need a temporary
5473 for EXP, as opposed to a recursive call to this function.
5474
5475 It is always safe for this routine to return zero since it merely
5476 searches for optimization opportunities. */
5477
5478 int
5479 safe_from_p (x, exp, top_p)
5480 rtx x;
5481 tree exp;
5482 int top_p;
5483 {
5484 rtx exp_rtl = 0;
5485 int i, nops;
5486 static tree save_expr_list;
5487
5488 if (x == 0
5489 /* If EXP has varying size, we MUST use a target since we currently
5490 have no way of allocating temporaries of variable size
5491 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5492 So we assume here that something at a higher level has prevented a
5493 clash. This is somewhat bogus, but the best we can do. Only
5494 do this when X is BLKmode and when we are at the top level. */
5495 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5496 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5497 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5498 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5499 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5500 != INTEGER_CST)
5501 && GET_MODE (x) == BLKmode)
5502 /* If X is in the outgoing argument area, it is always safe. */
5503 || (GET_CODE (x) == MEM
5504 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5505 || (GET_CODE (XEXP (x, 0)) == PLUS
5506 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5507 return 1;
5508
5509 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5510 find the underlying pseudo. */
5511 if (GET_CODE (x) == SUBREG)
5512 {
5513 x = SUBREG_REG (x);
5514 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5515 return 0;
5516 }
5517
5518 /* A SAVE_EXPR might appear many times in the expression passed to the
5519 top-level safe_from_p call, and if it has a complex subexpression,
5520 examining it multiple times could result in a combinatorial explosion.
5521 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5522 with optimization took about 28 minutes to compile -- even though it was
5523 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5524 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5525 we have processed. Note that the only test of top_p was above. */
5526
5527 if (top_p)
5528 {
5529 int rtn;
5530 tree t;
5531
5532 save_expr_list = 0;
5533
5534 rtn = safe_from_p (x, exp, 0);
5535
5536 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5537 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5538
5539 return rtn;
5540 }
5541
5542 /* Now look at our tree code and possibly recurse. */
5543 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5544 {
5545 case 'd':
5546 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5547 break;
5548
5549 case 'c':
5550 return 1;
5551
5552 case 'x':
5553 if (TREE_CODE (exp) == TREE_LIST)
5554 return ((TREE_VALUE (exp) == 0
5555 || safe_from_p (x, TREE_VALUE (exp), 0))
5556 && (TREE_CHAIN (exp) == 0
5557 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5558 else if (TREE_CODE (exp) == ERROR_MARK)
5559 return 1; /* An already-visited SAVE_EXPR? */
5560 else
5561 return 0;
5562
5563 case '1':
5564 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5565
5566 case '2':
5567 case '<':
5568 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5569 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5570
5571 case 'e':
5572 case 'r':
5573 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5574 the expression. If it is set, we conflict iff we are that rtx or
5575 both are in memory. Otherwise, we check all operands of the
5576 expression recursively. */
5577
5578 switch (TREE_CODE (exp))
5579 {
5580 case ADDR_EXPR:
5581 /* If the operand is static or we are static, we can't conflict.
5582 Likewise if we don't conflict with the operand at all. */
5583 if (staticp (TREE_OPERAND (exp, 0))
5584 || TREE_STATIC (exp)
5585 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5586 return 1;
5587
5588 /* Otherwise, the only way this can conflict is if we are taking
5589 the address of a DECL a that address if part of X, which is
5590 very rare. */
5591 exp = TREE_OPERAND (exp, 0);
5592 if (DECL_P (exp))
5593 {
5594 if (!DECL_RTL_SET_P (exp)
5595 || GET_CODE (DECL_RTL (exp)) != MEM)
5596 return 0;
5597 else
5598 exp_rtl = XEXP (DECL_RTL (exp), 0);
5599 }
5600 break;
5601
5602 case INDIRECT_REF:
5603 if (GET_CODE (x) == MEM
5604 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5605 get_alias_set (exp)))
5606 return 0;
5607 break;
5608
5609 case CALL_EXPR:
5610 /* Assume that the call will clobber all hard registers and
5611 all of memory. */
5612 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5613 || GET_CODE (x) == MEM)
5614 return 0;
5615 break;
5616
5617 case RTL_EXPR:
5618 /* If a sequence exists, we would have to scan every instruction
5619 in the sequence to see if it was safe. This is probably not
5620 worthwhile. */
5621 if (RTL_EXPR_SEQUENCE (exp))
5622 return 0;
5623
5624 exp_rtl = RTL_EXPR_RTL (exp);
5625 break;
5626
5627 case WITH_CLEANUP_EXPR:
5628 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5629 break;
5630
5631 case CLEANUP_POINT_EXPR:
5632 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5633
5634 case SAVE_EXPR:
5635 exp_rtl = SAVE_EXPR_RTL (exp);
5636 if (exp_rtl)
5637 break;
5638
5639 /* If we've already scanned this, don't do it again. Otherwise,
5640 show we've scanned it and record for clearing the flag if we're
5641 going on. */
5642 if (TREE_PRIVATE (exp))
5643 return 1;
5644
5645 TREE_PRIVATE (exp) = 1;
5646 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5647 {
5648 TREE_PRIVATE (exp) = 0;
5649 return 0;
5650 }
5651
5652 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5653 return 1;
5654
5655 case BIND_EXPR:
5656 /* The only operand we look at is operand 1. The rest aren't
5657 part of the expression. */
5658 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5659
5660 case METHOD_CALL_EXPR:
5661 /* This takes an rtx argument, but shouldn't appear here. */
5662 abort ();
5663
5664 default:
5665 break;
5666 }
5667
5668 /* If we have an rtx, we do not need to scan our operands. */
5669 if (exp_rtl)
5670 break;
5671
5672 nops = first_rtl_op (TREE_CODE (exp));
5673 for (i = 0; i < nops; i++)
5674 if (TREE_OPERAND (exp, i) != 0
5675 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5676 return 0;
5677
5678 /* If this is a language-specific tree code, it may require
5679 special handling. */
5680 if ((unsigned int) TREE_CODE (exp)
5681 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5682 && !(*lang_hooks.safe_from_p) (x, exp))
5683 return 0;
5684 }
5685
5686 /* If we have an rtl, find any enclosed object. Then see if we conflict
5687 with it. */
5688 if (exp_rtl)
5689 {
5690 if (GET_CODE (exp_rtl) == SUBREG)
5691 {
5692 exp_rtl = SUBREG_REG (exp_rtl);
5693 if (GET_CODE (exp_rtl) == REG
5694 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5695 return 0;
5696 }
5697
5698 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5699 are memory and they conflict. */
5700 return ! (rtx_equal_p (x, exp_rtl)
5701 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5702 && true_dependence (exp_rtl, GET_MODE (x), x,
5703 rtx_addr_varies_p)));
5704 }
5705
5706 /* If we reach here, it is safe. */
5707 return 1;
5708 }
5709
5710 /* Subroutine of expand_expr: return rtx if EXP is a
5711 variable or parameter; else return 0. */
5712
5713 static rtx
5714 var_rtx (exp)
5715 tree exp;
5716 {
5717 STRIP_NOPS (exp);
5718 switch (TREE_CODE (exp))
5719 {
5720 case PARM_DECL:
5721 case VAR_DECL:
5722 return DECL_RTL (exp);
5723 default:
5724 return 0;
5725 }
5726 }
5727
5728 #ifdef MAX_INTEGER_COMPUTATION_MODE
5729
5730 void
5731 check_max_integer_computation_mode (exp)
5732 tree exp;
5733 {
5734 enum tree_code code;
5735 enum machine_mode mode;
5736
5737 /* Strip any NOPs that don't change the mode. */
5738 STRIP_NOPS (exp);
5739 code = TREE_CODE (exp);
5740
5741 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5742 if (code == NOP_EXPR
5743 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5744 return;
5745
5746 /* First check the type of the overall operation. We need only look at
5747 unary, binary and relational operations. */
5748 if (TREE_CODE_CLASS (code) == '1'
5749 || TREE_CODE_CLASS (code) == '2'
5750 || TREE_CODE_CLASS (code) == '<')
5751 {
5752 mode = TYPE_MODE (TREE_TYPE (exp));
5753 if (GET_MODE_CLASS (mode) == MODE_INT
5754 && mode > MAX_INTEGER_COMPUTATION_MODE)
5755 internal_error ("unsupported wide integer operation");
5756 }
5757
5758 /* Check operand of a unary op. */
5759 if (TREE_CODE_CLASS (code) == '1')
5760 {
5761 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5762 if (GET_MODE_CLASS (mode) == MODE_INT
5763 && mode > MAX_INTEGER_COMPUTATION_MODE)
5764 internal_error ("unsupported wide integer operation");
5765 }
5766
5767 /* Check operands of a binary/comparison op. */
5768 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5769 {
5770 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5771 if (GET_MODE_CLASS (mode) == MODE_INT
5772 && mode > MAX_INTEGER_COMPUTATION_MODE)
5773 internal_error ("unsupported wide integer operation");
5774
5775 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5776 if (GET_MODE_CLASS (mode) == MODE_INT
5777 && mode > MAX_INTEGER_COMPUTATION_MODE)
5778 internal_error ("unsupported wide integer operation");
5779 }
5780 }
5781 #endif
5782 \f
5783 /* Return the highest power of two that EXP is known to be a multiple of.
5784 This is used in updating alignment of MEMs in array references. */
5785
5786 static HOST_WIDE_INT
5787 highest_pow2_factor (exp)
5788 tree exp;
5789 {
5790 HOST_WIDE_INT c0, c1;
5791
5792 switch (TREE_CODE (exp))
5793 {
5794 case INTEGER_CST:
5795 /* If the integer is expressable in a HOST_WIDE_INT, we can find the
5796 lowest bit that's a one. If the result is zero, return
5797 BIGGEST_ALIGNMENT. We need to handle this case since we can find it
5798 in a COND_EXPR, a MIN_EXPR, or a MAX_EXPR. If the constant overlows,
5799 we have an erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5800 later ICE. */
5801 if (TREE_CONSTANT_OVERFLOW (exp)
5802 || integer_zerop (exp))
5803 return BIGGEST_ALIGNMENT;
5804 else if (host_integerp (exp, 0))
5805 {
5806 c0 = tree_low_cst (exp, 0);
5807 c0 = c0 < 0 ? - c0 : c0;
5808 return c0 & -c0;
5809 }
5810 break;
5811
5812 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5813 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5814 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5815 return MIN (c0, c1);
5816
5817 case MULT_EXPR:
5818 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5819 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5820 return c0 * c1;
5821
5822 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5823 case CEIL_DIV_EXPR:
5824 if (integer_pow2p (TREE_OPERAND (exp, 1))
5825 && host_integerp (TREE_OPERAND (exp, 1), 1))
5826 {
5827 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5828 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5829 return MAX (1, c0 / c1);
5830 }
5831 break;
5832
5833 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5834 case SAVE_EXPR: case WITH_RECORD_EXPR:
5835 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5836
5837 case COMPOUND_EXPR:
5838 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5839
5840 case COND_EXPR:
5841 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5842 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5843 return MIN (c0, c1);
5844
5845 default:
5846 break;
5847 }
5848
5849 return 1;
5850 }
5851 \f
5852 /* Return an object on the placeholder list that matches EXP, a
5853 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5854 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5855 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5856 is a location which initially points to a starting location in the
5857 placeholder list (zero means start of the list) and where a pointer into
5858 the placeholder list at which the object is found is placed. */
5859
5860 tree
5861 find_placeholder (exp, plist)
5862 tree exp;
5863 tree *plist;
5864 {
5865 tree type = TREE_TYPE (exp);
5866 tree placeholder_expr;
5867
5868 for (placeholder_expr
5869 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5870 placeholder_expr != 0;
5871 placeholder_expr = TREE_CHAIN (placeholder_expr))
5872 {
5873 tree need_type = TYPE_MAIN_VARIANT (type);
5874 tree elt;
5875
5876 /* Find the outermost reference that is of the type we want. If none,
5877 see if any object has a type that is a pointer to the type we
5878 want. */
5879 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5880 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5881 || TREE_CODE (elt) == COND_EXPR)
5882 ? TREE_OPERAND (elt, 1)
5883 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5884 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5885 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5886 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5887 ? TREE_OPERAND (elt, 0) : 0))
5888 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5889 {
5890 if (plist)
5891 *plist = placeholder_expr;
5892 return elt;
5893 }
5894
5895 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5896 elt
5897 = ((TREE_CODE (elt) == COMPOUND_EXPR
5898 || TREE_CODE (elt) == COND_EXPR)
5899 ? TREE_OPERAND (elt, 1)
5900 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5901 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5902 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5903 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5904 ? TREE_OPERAND (elt, 0) : 0))
5905 if (POINTER_TYPE_P (TREE_TYPE (elt))
5906 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5907 == need_type))
5908 {
5909 if (plist)
5910 *plist = placeholder_expr;
5911 return build1 (INDIRECT_REF, need_type, elt);
5912 }
5913 }
5914
5915 return 0;
5916 }
5917 \f
5918 /* expand_expr: generate code for computing expression EXP.
5919 An rtx for the computed value is returned. The value is never null.
5920 In the case of a void EXP, const0_rtx is returned.
5921
5922 The value may be stored in TARGET if TARGET is nonzero.
5923 TARGET is just a suggestion; callers must assume that
5924 the rtx returned may not be the same as TARGET.
5925
5926 If TARGET is CONST0_RTX, it means that the value will be ignored.
5927
5928 If TMODE is not VOIDmode, it suggests generating the
5929 result in mode TMODE. But this is done only when convenient.
5930 Otherwise, TMODE is ignored and the value generated in its natural mode.
5931 TMODE is just a suggestion; callers must assume that
5932 the rtx returned may not have mode TMODE.
5933
5934 Note that TARGET may have neither TMODE nor MODE. In that case, it
5935 probably will not be used.
5936
5937 If MODIFIER is EXPAND_SUM then when EXP is an addition
5938 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5939 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5940 products as above, or REG or MEM, or constant.
5941 Ordinarily in such cases we would output mul or add instructions
5942 and then return a pseudo reg containing the sum.
5943
5944 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5945 it also marks a label as absolutely required (it can't be dead).
5946 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5947 This is used for outputting expressions used in initializers.
5948
5949 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5950 with a constant address even if that address is not normally legitimate.
5951 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5952
5953 rtx
5954 expand_expr (exp, target, tmode, modifier)
5955 tree exp;
5956 rtx target;
5957 enum machine_mode tmode;
5958 enum expand_modifier modifier;
5959 {
5960 rtx op0, op1, temp;
5961 tree type = TREE_TYPE (exp);
5962 int unsignedp = TREE_UNSIGNED (type);
5963 enum machine_mode mode;
5964 enum tree_code code = TREE_CODE (exp);
5965 optab this_optab;
5966 rtx subtarget, original_target;
5967 int ignore;
5968 tree context;
5969
5970 /* Handle ERROR_MARK before anybody tries to access its type. */
5971 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5972 {
5973 op0 = CONST0_RTX (tmode);
5974 if (op0 != 0)
5975 return op0;
5976 return const0_rtx;
5977 }
5978
5979 mode = TYPE_MODE (type);
5980 /* Use subtarget as the target for operand 0 of a binary operation. */
5981 subtarget = get_subtarget (target);
5982 original_target = target;
5983 ignore = (target == const0_rtx
5984 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5985 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5986 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
5987 && TREE_CODE (type) == VOID_TYPE));
5988
5989 /* If we are going to ignore this result, we need only do something
5990 if there is a side-effect somewhere in the expression. If there
5991 is, short-circuit the most common cases here. Note that we must
5992 not call expand_expr with anything but const0_rtx in case this
5993 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5994
5995 if (ignore)
5996 {
5997 if (! TREE_SIDE_EFFECTS (exp))
5998 return const0_rtx;
5999
6000 /* Ensure we reference a volatile object even if value is ignored, but
6001 don't do this if all we are doing is taking its address. */
6002 if (TREE_THIS_VOLATILE (exp)
6003 && TREE_CODE (exp) != FUNCTION_DECL
6004 && mode != VOIDmode && mode != BLKmode
6005 && modifier != EXPAND_CONST_ADDRESS)
6006 {
6007 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6008 if (GET_CODE (temp) == MEM)
6009 temp = copy_to_reg (temp);
6010 return const0_rtx;
6011 }
6012
6013 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6014 || code == INDIRECT_REF || code == BUFFER_REF)
6015 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6016 modifier);
6017
6018 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6019 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6020 {
6021 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6022 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6023 return const0_rtx;
6024 }
6025 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6026 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6027 /* If the second operand has no side effects, just evaluate
6028 the first. */
6029 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6030 modifier);
6031 else if (code == BIT_FIELD_REF)
6032 {
6033 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6034 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6035 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6036 return const0_rtx;
6037 }
6038
6039 target = 0;
6040 }
6041
6042 #ifdef MAX_INTEGER_COMPUTATION_MODE
6043 /* Only check stuff here if the mode we want is different from the mode
6044 of the expression; if it's the same, check_max_integer_computiation_mode
6045 will handle it. Do we really need to check this stuff at all? */
6046
6047 if (target
6048 && GET_MODE (target) != mode
6049 && TREE_CODE (exp) != INTEGER_CST
6050 && TREE_CODE (exp) != PARM_DECL
6051 && TREE_CODE (exp) != ARRAY_REF
6052 && TREE_CODE (exp) != ARRAY_RANGE_REF
6053 && TREE_CODE (exp) != COMPONENT_REF
6054 && TREE_CODE (exp) != BIT_FIELD_REF
6055 && TREE_CODE (exp) != INDIRECT_REF
6056 && TREE_CODE (exp) != CALL_EXPR
6057 && TREE_CODE (exp) != VAR_DECL
6058 && TREE_CODE (exp) != RTL_EXPR)
6059 {
6060 enum machine_mode mode = GET_MODE (target);
6061
6062 if (GET_MODE_CLASS (mode) == MODE_INT
6063 && mode > MAX_INTEGER_COMPUTATION_MODE)
6064 internal_error ("unsupported wide integer operation");
6065 }
6066
6067 if (tmode != mode
6068 && TREE_CODE (exp) != INTEGER_CST
6069 && TREE_CODE (exp) != PARM_DECL
6070 && TREE_CODE (exp) != ARRAY_REF
6071 && TREE_CODE (exp) != ARRAY_RANGE_REF
6072 && TREE_CODE (exp) != COMPONENT_REF
6073 && TREE_CODE (exp) != BIT_FIELD_REF
6074 && TREE_CODE (exp) != INDIRECT_REF
6075 && TREE_CODE (exp) != VAR_DECL
6076 && TREE_CODE (exp) != CALL_EXPR
6077 && TREE_CODE (exp) != RTL_EXPR
6078 && GET_MODE_CLASS (tmode) == MODE_INT
6079 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6080 internal_error ("unsupported wide integer operation");
6081
6082 check_max_integer_computation_mode (exp);
6083 #endif
6084
6085 /* If will do cse, generate all results into pseudo registers
6086 since 1) that allows cse to find more things
6087 and 2) otherwise cse could produce an insn the machine
6088 cannot support. And exception is a CONSTRUCTOR into a multi-word
6089 MEM: that's much more likely to be most efficient into the MEM. */
6090
6091 if (! cse_not_expected && mode != BLKmode && target
6092 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6093 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6094 target = subtarget;
6095
6096 switch (code)
6097 {
6098 case LABEL_DECL:
6099 {
6100 tree function = decl_function_context (exp);
6101 /* Handle using a label in a containing function. */
6102 if (function != current_function_decl
6103 && function != inline_function_decl && function != 0)
6104 {
6105 struct function *p = find_function_data (function);
6106 p->expr->x_forced_labels
6107 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6108 p->expr->x_forced_labels);
6109 }
6110 else
6111 {
6112 if (modifier == EXPAND_INITIALIZER)
6113 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6114 label_rtx (exp),
6115 forced_labels);
6116 }
6117
6118 temp = gen_rtx_MEM (FUNCTION_MODE,
6119 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6120 if (function != current_function_decl
6121 && function != inline_function_decl && function != 0)
6122 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6123 return temp;
6124 }
6125
6126 case PARM_DECL:
6127 if (DECL_RTL (exp) == 0)
6128 {
6129 error_with_decl (exp, "prior parameter's size depends on `%s'");
6130 return CONST0_RTX (mode);
6131 }
6132
6133 /* ... fall through ... */
6134
6135 case VAR_DECL:
6136 /* If a static var's type was incomplete when the decl was written,
6137 but the type is complete now, lay out the decl now. */
6138 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6139 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6140 {
6141 rtx value = DECL_RTL_IF_SET (exp);
6142
6143 layout_decl (exp, 0);
6144
6145 /* If the RTL was already set, update its mode and memory
6146 attributes. */
6147 if (value != 0)
6148 {
6149 PUT_MODE (value, DECL_MODE (exp));
6150 SET_DECL_RTL (exp, 0);
6151 set_mem_attributes (value, exp, 1);
6152 SET_DECL_RTL (exp, value);
6153 }
6154 }
6155
6156 /* ... fall through ... */
6157
6158 case FUNCTION_DECL:
6159 case RESULT_DECL:
6160 if (DECL_RTL (exp) == 0)
6161 abort ();
6162
6163 /* Ensure variable marked as used even if it doesn't go through
6164 a parser. If it hasn't be used yet, write out an external
6165 definition. */
6166 if (! TREE_USED (exp))
6167 {
6168 assemble_external (exp);
6169 TREE_USED (exp) = 1;
6170 }
6171
6172 /* Show we haven't gotten RTL for this yet. */
6173 temp = 0;
6174
6175 /* Handle variables inherited from containing functions. */
6176 context = decl_function_context (exp);
6177
6178 /* We treat inline_function_decl as an alias for the current function
6179 because that is the inline function whose vars, types, etc.
6180 are being merged into the current function.
6181 See expand_inline_function. */
6182
6183 if (context != 0 && context != current_function_decl
6184 && context != inline_function_decl
6185 /* If var is static, we don't need a static chain to access it. */
6186 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6187 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6188 {
6189 rtx addr;
6190
6191 /* Mark as non-local and addressable. */
6192 DECL_NONLOCAL (exp) = 1;
6193 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6194 abort ();
6195 mark_addressable (exp);
6196 if (GET_CODE (DECL_RTL (exp)) != MEM)
6197 abort ();
6198 addr = XEXP (DECL_RTL (exp), 0);
6199 if (GET_CODE (addr) == MEM)
6200 addr
6201 = replace_equiv_address (addr,
6202 fix_lexical_addr (XEXP (addr, 0), exp));
6203 else
6204 addr = fix_lexical_addr (addr, exp);
6205
6206 temp = replace_equiv_address (DECL_RTL (exp), addr);
6207 }
6208
6209 /* This is the case of an array whose size is to be determined
6210 from its initializer, while the initializer is still being parsed.
6211 See expand_decl. */
6212
6213 else if (GET_CODE (DECL_RTL (exp)) == MEM
6214 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6215 temp = validize_mem (DECL_RTL (exp));
6216
6217 /* If DECL_RTL is memory, we are in the normal case and either
6218 the address is not valid or it is not a register and -fforce-addr
6219 is specified, get the address into a register. */
6220
6221 else if (GET_CODE (DECL_RTL (exp)) == MEM
6222 && modifier != EXPAND_CONST_ADDRESS
6223 && modifier != EXPAND_SUM
6224 && modifier != EXPAND_INITIALIZER
6225 && (! memory_address_p (DECL_MODE (exp),
6226 XEXP (DECL_RTL (exp), 0))
6227 || (flag_force_addr
6228 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6229 temp = replace_equiv_address (DECL_RTL (exp),
6230 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6231
6232 /* If we got something, return it. But first, set the alignment
6233 if the address is a register. */
6234 if (temp != 0)
6235 {
6236 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6237 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6238
6239 return temp;
6240 }
6241
6242 /* If the mode of DECL_RTL does not match that of the decl, it
6243 must be a promoted value. We return a SUBREG of the wanted mode,
6244 but mark it so that we know that it was already extended. */
6245
6246 if (GET_CODE (DECL_RTL (exp)) == REG
6247 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6248 {
6249 /* Get the signedness used for this variable. Ensure we get the
6250 same mode we got when the variable was declared. */
6251 if (GET_MODE (DECL_RTL (exp))
6252 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6253 abort ();
6254
6255 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6256 SUBREG_PROMOTED_VAR_P (temp) = 1;
6257 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6258 return temp;
6259 }
6260
6261 return DECL_RTL (exp);
6262
6263 case INTEGER_CST:
6264 return immed_double_const (TREE_INT_CST_LOW (exp),
6265 TREE_INT_CST_HIGH (exp), mode);
6266
6267 case CONST_DECL:
6268 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6269
6270 case REAL_CST:
6271 /* If optimized, generate immediate CONST_DOUBLE
6272 which will be turned into memory by reload if necessary.
6273
6274 We used to force a register so that loop.c could see it. But
6275 this does not allow gen_* patterns to perform optimizations with
6276 the constants. It also produces two insns in cases like "x = 1.0;".
6277 On most machines, floating-point constants are not permitted in
6278 many insns, so we'd end up copying it to a register in any case.
6279
6280 Now, we do the copying in expand_binop, if appropriate. */
6281 return immed_real_const (exp);
6282
6283 case COMPLEX_CST:
6284 case STRING_CST:
6285 if (! TREE_CST_RTL (exp))
6286 output_constant_def (exp, 1);
6287
6288 /* TREE_CST_RTL probably contains a constant address.
6289 On RISC machines where a constant address isn't valid,
6290 make some insns to get that address into a register. */
6291 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6292 && modifier != EXPAND_CONST_ADDRESS
6293 && modifier != EXPAND_INITIALIZER
6294 && modifier != EXPAND_SUM
6295 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6296 || (flag_force_addr
6297 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6298 return replace_equiv_address (TREE_CST_RTL (exp),
6299 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6300 return TREE_CST_RTL (exp);
6301
6302 case EXPR_WITH_FILE_LOCATION:
6303 {
6304 rtx to_return;
6305 const char *saved_input_filename = input_filename;
6306 int saved_lineno = lineno;
6307 input_filename = EXPR_WFL_FILENAME (exp);
6308 lineno = EXPR_WFL_LINENO (exp);
6309 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6310 emit_line_note (input_filename, lineno);
6311 /* Possibly avoid switching back and forth here. */
6312 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6313 input_filename = saved_input_filename;
6314 lineno = saved_lineno;
6315 return to_return;
6316 }
6317
6318 case SAVE_EXPR:
6319 context = decl_function_context (exp);
6320
6321 /* If this SAVE_EXPR was at global context, assume we are an
6322 initialization function and move it into our context. */
6323 if (context == 0)
6324 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6325
6326 /* We treat inline_function_decl as an alias for the current function
6327 because that is the inline function whose vars, types, etc.
6328 are being merged into the current function.
6329 See expand_inline_function. */
6330 if (context == current_function_decl || context == inline_function_decl)
6331 context = 0;
6332
6333 /* If this is non-local, handle it. */
6334 if (context)
6335 {
6336 /* The following call just exists to abort if the context is
6337 not of a containing function. */
6338 find_function_data (context);
6339
6340 temp = SAVE_EXPR_RTL (exp);
6341 if (temp && GET_CODE (temp) == REG)
6342 {
6343 put_var_into_stack (exp);
6344 temp = SAVE_EXPR_RTL (exp);
6345 }
6346 if (temp == 0 || GET_CODE (temp) != MEM)
6347 abort ();
6348 return
6349 replace_equiv_address (temp,
6350 fix_lexical_addr (XEXP (temp, 0), exp));
6351 }
6352 if (SAVE_EXPR_RTL (exp) == 0)
6353 {
6354 if (mode == VOIDmode)
6355 temp = const0_rtx;
6356 else
6357 temp = assign_temp (build_qualified_type (type,
6358 (TYPE_QUALS (type)
6359 | TYPE_QUAL_CONST)),
6360 3, 0, 0);
6361
6362 SAVE_EXPR_RTL (exp) = temp;
6363 if (!optimize && GET_CODE (temp) == REG)
6364 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6365 save_expr_regs);
6366
6367 /* If the mode of TEMP does not match that of the expression, it
6368 must be a promoted value. We pass store_expr a SUBREG of the
6369 wanted mode but mark it so that we know that it was already
6370 extended. Note that `unsignedp' was modified above in
6371 this case. */
6372
6373 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6374 {
6375 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6376 SUBREG_PROMOTED_VAR_P (temp) = 1;
6377 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6378 }
6379
6380 if (temp == const0_rtx)
6381 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6382 else
6383 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6384
6385 TREE_USED (exp) = 1;
6386 }
6387
6388 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6389 must be a promoted value. We return a SUBREG of the wanted mode,
6390 but mark it so that we know that it was already extended. */
6391
6392 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6393 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6394 {
6395 /* Compute the signedness and make the proper SUBREG. */
6396 promote_mode (type, mode, &unsignedp, 0);
6397 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6398 SUBREG_PROMOTED_VAR_P (temp) = 1;
6399 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6400 return temp;
6401 }
6402
6403 return SAVE_EXPR_RTL (exp);
6404
6405 case UNSAVE_EXPR:
6406 {
6407 rtx temp;
6408 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6409 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6410 return temp;
6411 }
6412
6413 case PLACEHOLDER_EXPR:
6414 {
6415 tree old_list = placeholder_list;
6416 tree placeholder_expr = 0;
6417
6418 exp = find_placeholder (exp, &placeholder_expr);
6419 if (exp == 0)
6420 abort ();
6421
6422 placeholder_list = TREE_CHAIN (placeholder_expr);
6423 temp = expand_expr (exp, original_target, tmode, modifier);
6424 placeholder_list = old_list;
6425 return temp;
6426 }
6427
6428 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6429 abort ();
6430
6431 case WITH_RECORD_EXPR:
6432 /* Put the object on the placeholder list, expand our first operand,
6433 and pop the list. */
6434 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6435 placeholder_list);
6436 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6437 modifier);
6438 placeholder_list = TREE_CHAIN (placeholder_list);
6439 return target;
6440
6441 case GOTO_EXPR:
6442 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6443 expand_goto (TREE_OPERAND (exp, 0));
6444 else
6445 expand_computed_goto (TREE_OPERAND (exp, 0));
6446 return const0_rtx;
6447
6448 case EXIT_EXPR:
6449 expand_exit_loop_if_false (NULL,
6450 invert_truthvalue (TREE_OPERAND (exp, 0)));
6451 return const0_rtx;
6452
6453 case LABELED_BLOCK_EXPR:
6454 if (LABELED_BLOCK_BODY (exp))
6455 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6456 /* Should perhaps use expand_label, but this is simpler and safer. */
6457 do_pending_stack_adjust ();
6458 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6459 return const0_rtx;
6460
6461 case EXIT_BLOCK_EXPR:
6462 if (EXIT_BLOCK_RETURN (exp))
6463 sorry ("returned value in block_exit_expr");
6464 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6465 return const0_rtx;
6466
6467 case LOOP_EXPR:
6468 push_temp_slots ();
6469 expand_start_loop (1);
6470 expand_expr_stmt (TREE_OPERAND (exp, 0));
6471 expand_end_loop ();
6472 pop_temp_slots ();
6473
6474 return const0_rtx;
6475
6476 case BIND_EXPR:
6477 {
6478 tree vars = TREE_OPERAND (exp, 0);
6479 int vars_need_expansion = 0;
6480
6481 /* Need to open a binding contour here because
6482 if there are any cleanups they must be contained here. */
6483 expand_start_bindings (2);
6484
6485 /* Mark the corresponding BLOCK for output in its proper place. */
6486 if (TREE_OPERAND (exp, 2) != 0
6487 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6488 insert_block (TREE_OPERAND (exp, 2));
6489
6490 /* If VARS have not yet been expanded, expand them now. */
6491 while (vars)
6492 {
6493 if (!DECL_RTL_SET_P (vars))
6494 {
6495 vars_need_expansion = 1;
6496 expand_decl (vars);
6497 }
6498 expand_decl_init (vars);
6499 vars = TREE_CHAIN (vars);
6500 }
6501
6502 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6503
6504 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6505
6506 return temp;
6507 }
6508
6509 case RTL_EXPR:
6510 if (RTL_EXPR_SEQUENCE (exp))
6511 {
6512 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6513 abort ();
6514 emit_insns (RTL_EXPR_SEQUENCE (exp));
6515 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6516 }
6517 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6518 free_temps_for_rtl_expr (exp);
6519 return RTL_EXPR_RTL (exp);
6520
6521 case CONSTRUCTOR:
6522 /* If we don't need the result, just ensure we evaluate any
6523 subexpressions. */
6524 if (ignore)
6525 {
6526 tree elt;
6527
6528 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6529 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6530
6531 return const0_rtx;
6532 }
6533
6534 /* All elts simple constants => refer to a constant in memory. But
6535 if this is a non-BLKmode mode, let it store a field at a time
6536 since that should make a CONST_INT or CONST_DOUBLE when we
6537 fold. Likewise, if we have a target we can use, it is best to
6538 store directly into the target unless the type is large enough
6539 that memcpy will be used. If we are making an initializer and
6540 all operands are constant, put it in memory as well. */
6541 else if ((TREE_STATIC (exp)
6542 && ((mode == BLKmode
6543 && ! (target != 0 && safe_from_p (target, exp, 1)))
6544 || TREE_ADDRESSABLE (exp)
6545 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6546 && (! MOVE_BY_PIECES_P
6547 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6548 TYPE_ALIGN (type)))
6549 && ! mostly_zeros_p (exp))))
6550 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6551 {
6552 rtx constructor = output_constant_def (exp, 1);
6553
6554 if (modifier != EXPAND_CONST_ADDRESS
6555 && modifier != EXPAND_INITIALIZER
6556 && modifier != EXPAND_SUM)
6557 constructor = validize_mem (constructor);
6558
6559 return constructor;
6560 }
6561 else
6562 {
6563 /* Handle calls that pass values in multiple non-contiguous
6564 locations. The Irix 6 ABI has examples of this. */
6565 if (target == 0 || ! safe_from_p (target, exp, 1)
6566 || GET_CODE (target) == PARALLEL)
6567 target
6568 = assign_temp (build_qualified_type (type,
6569 (TYPE_QUALS (type)
6570 | (TREE_READONLY (exp)
6571 * TYPE_QUAL_CONST))),
6572 0, TREE_ADDRESSABLE (exp), 1);
6573
6574 store_constructor (exp, target, 0,
6575 int_size_in_bytes (TREE_TYPE (exp)));
6576 return target;
6577 }
6578
6579 case INDIRECT_REF:
6580 {
6581 tree exp1 = TREE_OPERAND (exp, 0);
6582 tree index;
6583 tree string = string_constant (exp1, &index);
6584
6585 /* Try to optimize reads from const strings. */
6586 if (string
6587 && TREE_CODE (string) == STRING_CST
6588 && TREE_CODE (index) == INTEGER_CST
6589 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6590 && GET_MODE_CLASS (mode) == MODE_INT
6591 && GET_MODE_SIZE (mode) == 1
6592 && modifier != EXPAND_WRITE)
6593 return
6594 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6595
6596 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6597 op0 = memory_address (mode, op0);
6598 temp = gen_rtx_MEM (mode, op0);
6599 set_mem_attributes (temp, exp, 0);
6600
6601 /* If we are writing to this object and its type is a record with
6602 readonly fields, we must mark it as readonly so it will
6603 conflict with readonly references to those fields. */
6604 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6605 RTX_UNCHANGING_P (temp) = 1;
6606
6607 return temp;
6608 }
6609
6610 case ARRAY_REF:
6611 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6612 abort ();
6613
6614 {
6615 tree array = TREE_OPERAND (exp, 0);
6616 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6617 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6618 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6619 HOST_WIDE_INT i;
6620
6621 /* Optimize the special-case of a zero lower bound.
6622
6623 We convert the low_bound to sizetype to avoid some problems
6624 with constant folding. (E.g. suppose the lower bound is 1,
6625 and its mode is QI. Without the conversion, (ARRAY
6626 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6627 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6628
6629 if (! integer_zerop (low_bound))
6630 index = size_diffop (index, convert (sizetype, low_bound));
6631
6632 /* Fold an expression like: "foo"[2].
6633 This is not done in fold so it won't happen inside &.
6634 Don't fold if this is for wide characters since it's too
6635 difficult to do correctly and this is a very rare case. */
6636
6637 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6638 && TREE_CODE (array) == STRING_CST
6639 && TREE_CODE (index) == INTEGER_CST
6640 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6641 && GET_MODE_CLASS (mode) == MODE_INT
6642 && GET_MODE_SIZE (mode) == 1)
6643 return
6644 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6645
6646 /* If this is a constant index into a constant array,
6647 just get the value from the array. Handle both the cases when
6648 we have an explicit constructor and when our operand is a variable
6649 that was declared const. */
6650
6651 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6652 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6653 && TREE_CODE (index) == INTEGER_CST
6654 && 0 > compare_tree_int (index,
6655 list_length (CONSTRUCTOR_ELTS
6656 (TREE_OPERAND (exp, 0)))))
6657 {
6658 tree elem;
6659
6660 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6661 i = TREE_INT_CST_LOW (index);
6662 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6663 ;
6664
6665 if (elem)
6666 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6667 modifier);
6668 }
6669
6670 else if (optimize >= 1
6671 && modifier != EXPAND_CONST_ADDRESS
6672 && modifier != EXPAND_INITIALIZER
6673 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6674 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6675 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6676 {
6677 if (TREE_CODE (index) == INTEGER_CST)
6678 {
6679 tree init = DECL_INITIAL (array);
6680
6681 if (TREE_CODE (init) == CONSTRUCTOR)
6682 {
6683 tree elem;
6684
6685 for (elem = CONSTRUCTOR_ELTS (init);
6686 (elem
6687 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6688 elem = TREE_CHAIN (elem))
6689 ;
6690
6691 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6692 return expand_expr (fold (TREE_VALUE (elem)), target,
6693 tmode, modifier);
6694 }
6695 else if (TREE_CODE (init) == STRING_CST
6696 && 0 > compare_tree_int (index,
6697 TREE_STRING_LENGTH (init)))
6698 {
6699 tree type = TREE_TYPE (TREE_TYPE (init));
6700 enum machine_mode mode = TYPE_MODE (type);
6701
6702 if (GET_MODE_CLASS (mode) == MODE_INT
6703 && GET_MODE_SIZE (mode) == 1)
6704 return (GEN_INT
6705 (TREE_STRING_POINTER
6706 (init)[TREE_INT_CST_LOW (index)]));
6707 }
6708 }
6709 }
6710 }
6711 /* Fall through. */
6712
6713 case COMPONENT_REF:
6714 case BIT_FIELD_REF:
6715 case ARRAY_RANGE_REF:
6716 /* If the operand is a CONSTRUCTOR, we can just extract the
6717 appropriate field if it is present. Don't do this if we have
6718 already written the data since we want to refer to that copy
6719 and varasm.c assumes that's what we'll do. */
6720 if (code == COMPONENT_REF
6721 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6722 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6723 {
6724 tree elt;
6725
6726 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6727 elt = TREE_CHAIN (elt))
6728 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6729 /* We can normally use the value of the field in the
6730 CONSTRUCTOR. However, if this is a bitfield in
6731 an integral mode that we can fit in a HOST_WIDE_INT,
6732 we must mask only the number of bits in the bitfield,
6733 since this is done implicitly by the constructor. If
6734 the bitfield does not meet either of those conditions,
6735 we can't do this optimization. */
6736 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6737 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6738 == MODE_INT)
6739 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6740 <= HOST_BITS_PER_WIDE_INT))))
6741 {
6742 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6743 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6744 {
6745 HOST_WIDE_INT bitsize
6746 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6747
6748 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6749 {
6750 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6751 op0 = expand_and (op0, op1, target);
6752 }
6753 else
6754 {
6755 enum machine_mode imode
6756 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6757 tree count
6758 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6759 0);
6760
6761 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6762 target, 0);
6763 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6764 target, 0);
6765 }
6766 }
6767
6768 return op0;
6769 }
6770 }
6771
6772 {
6773 enum machine_mode mode1;
6774 HOST_WIDE_INT bitsize, bitpos;
6775 tree offset;
6776 int volatilep = 0;
6777 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6778 &mode1, &unsignedp, &volatilep);
6779 rtx orig_op0;
6780
6781 /* If we got back the original object, something is wrong. Perhaps
6782 we are evaluating an expression too early. In any event, don't
6783 infinitely recurse. */
6784 if (tem == exp)
6785 abort ();
6786
6787 /* If TEM's type is a union of variable size, pass TARGET to the inner
6788 computation, since it will need a temporary and TARGET is known
6789 to have to do. This occurs in unchecked conversion in Ada. */
6790
6791 orig_op0 = op0
6792 = expand_expr (tem,
6793 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6794 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6795 != INTEGER_CST)
6796 ? target : NULL_RTX),
6797 VOIDmode,
6798 (modifier == EXPAND_INITIALIZER
6799 || modifier == EXPAND_CONST_ADDRESS)
6800 ? modifier : EXPAND_NORMAL);
6801
6802 /* If this is a constant, put it into a register if it is a
6803 legitimate constant and OFFSET is 0 and memory if it isn't. */
6804 if (CONSTANT_P (op0))
6805 {
6806 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6807 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6808 && offset == 0)
6809 op0 = force_reg (mode, op0);
6810 else
6811 op0 = validize_mem (force_const_mem (mode, op0));
6812 }
6813
6814 if (offset != 0)
6815 {
6816 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6817
6818 /* If this object is in a register, put it into memory.
6819 This case can't occur in C, but can in Ada if we have
6820 unchecked conversion of an expression from a scalar type to
6821 an array or record type. */
6822 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6823 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6824 {
6825 /* If the operand is a SAVE_EXPR, we can deal with this by
6826 forcing the SAVE_EXPR into memory. */
6827 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6828 {
6829 put_var_into_stack (TREE_OPERAND (exp, 0));
6830 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6831 }
6832 else
6833 {
6834 tree nt
6835 = build_qualified_type (TREE_TYPE (tem),
6836 (TYPE_QUALS (TREE_TYPE (tem))
6837 | TYPE_QUAL_CONST));
6838 rtx memloc = assign_temp (nt, 1, 1, 1);
6839
6840 emit_move_insn (memloc, op0);
6841 op0 = memloc;
6842 }
6843 }
6844
6845 if (GET_CODE (op0) != MEM)
6846 abort ();
6847
6848 if (GET_MODE (offset_rtx) != ptr_mode)
6849 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6850
6851 #ifdef POINTERS_EXTEND_UNSIGNED
6852 if (GET_MODE (offset_rtx) != Pmode)
6853 offset_rtx = convert_memory_address (Pmode, offset_rtx);
6854 #endif
6855
6856 /* A constant address in OP0 can have VOIDmode, we must not try
6857 to call force_reg for that case. Avoid that case. */
6858 if (GET_CODE (op0) == MEM
6859 && GET_MODE (op0) == BLKmode
6860 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6861 && bitsize != 0
6862 && (bitpos % bitsize) == 0
6863 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6864 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6865 {
6866 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6867
6868 if (GET_CODE (XEXP (temp, 0)) == REG)
6869 op0 = temp;
6870 else
6871 op0 = (replace_equiv_address
6872 (op0,
6873 force_reg (GET_MODE (XEXP (temp, 0)),
6874 XEXP (temp, 0))));
6875 bitpos = 0;
6876 }
6877
6878 op0 = offset_address (op0, offset_rtx,
6879 highest_pow2_factor (offset));
6880 }
6881
6882 /* Don't forget about volatility even if this is a bitfield. */
6883 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6884 {
6885 if (op0 == orig_op0)
6886 op0 = copy_rtx (op0);
6887
6888 MEM_VOLATILE_P (op0) = 1;
6889 }
6890
6891 /* In cases where an aligned union has an unaligned object
6892 as a field, we might be extracting a BLKmode value from
6893 an integer-mode (e.g., SImode) object. Handle this case
6894 by doing the extract into an object as wide as the field
6895 (which we know to be the width of a basic mode), then
6896 storing into memory, and changing the mode to BLKmode. */
6897 if (mode1 == VOIDmode
6898 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6899 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6900 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6901 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6902 && modifier != EXPAND_CONST_ADDRESS
6903 && modifier != EXPAND_INITIALIZER)
6904 /* If the field isn't aligned enough to fetch as a memref,
6905 fetch it as a bit field. */
6906 || (mode1 != BLKmode
6907 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6908 && ((TYPE_ALIGN (TREE_TYPE (tem))
6909 < GET_MODE_ALIGNMENT (mode))
6910 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6911 /* If the type and the field are a constant size and the
6912 size of the type isn't the same size as the bitfield,
6913 we must use bitfield operations. */
6914 || (bitsize >= 0
6915 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6916 == INTEGER_CST)
6917 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6918 bitsize)))
6919 {
6920 enum machine_mode ext_mode = mode;
6921
6922 if (ext_mode == BLKmode
6923 && ! (target != 0 && GET_CODE (op0) == MEM
6924 && GET_CODE (target) == MEM
6925 && bitpos % BITS_PER_UNIT == 0))
6926 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6927
6928 if (ext_mode == BLKmode)
6929 {
6930 /* In this case, BITPOS must start at a byte boundary and
6931 TARGET, if specified, must be a MEM. */
6932 if (GET_CODE (op0) != MEM
6933 || (target != 0 && GET_CODE (target) != MEM)
6934 || bitpos % BITS_PER_UNIT != 0)
6935 abort ();
6936
6937 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
6938 if (target == 0)
6939 target = assign_temp (type, 0, 1, 1);
6940
6941 emit_block_move (target, op0,
6942 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6943 / BITS_PER_UNIT));
6944
6945 return target;
6946 }
6947
6948 op0 = validize_mem (op0);
6949
6950 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6951 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6952
6953 op0 = extract_bit_field (op0, bitsize, bitpos,
6954 unsignedp, target, ext_mode, ext_mode,
6955 int_size_in_bytes (TREE_TYPE (tem)));
6956
6957 /* If the result is a record type and BITSIZE is narrower than
6958 the mode of OP0, an integral mode, and this is a big endian
6959 machine, we must put the field into the high-order bits. */
6960 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6961 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6962 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6963 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6964 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6965 - bitsize),
6966 op0, 1);
6967
6968 if (mode == BLKmode)
6969 {
6970 rtx new = assign_temp (build_qualified_type
6971 (type_for_mode (ext_mode, 0),
6972 TYPE_QUAL_CONST), 0, 1, 1);
6973
6974 emit_move_insn (new, op0);
6975 op0 = copy_rtx (new);
6976 PUT_MODE (op0, BLKmode);
6977 set_mem_attributes (op0, exp, 1);
6978 }
6979
6980 return op0;
6981 }
6982
6983 /* If the result is BLKmode, use that to access the object
6984 now as well. */
6985 if (mode == BLKmode)
6986 mode1 = BLKmode;
6987
6988 /* Get a reference to just this component. */
6989 if (modifier == EXPAND_CONST_ADDRESS
6990 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6991 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
6992 else
6993 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6994
6995 if (op0 == orig_op0)
6996 op0 = copy_rtx (op0);
6997
6998 set_mem_attributes (op0, exp, 0);
6999 if (GET_CODE (XEXP (op0, 0)) == REG)
7000 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7001
7002 MEM_VOLATILE_P (op0) |= volatilep;
7003 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7004 || modifier == EXPAND_CONST_ADDRESS
7005 || modifier == EXPAND_INITIALIZER)
7006 return op0;
7007 else if (target == 0)
7008 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7009
7010 convert_move (target, op0, unsignedp);
7011 return target;
7012 }
7013
7014 case VTABLE_REF:
7015 {
7016 rtx insn, before = get_last_insn (), vtbl_ref;
7017
7018 /* Evaluate the interior expression. */
7019 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7020 tmode, modifier);
7021
7022 /* Get or create an instruction off which to hang a note. */
7023 if (REG_P (subtarget))
7024 {
7025 target = subtarget;
7026 insn = get_last_insn ();
7027 if (insn == before)
7028 abort ();
7029 if (! INSN_P (insn))
7030 insn = prev_nonnote_insn (insn);
7031 }
7032 else
7033 {
7034 target = gen_reg_rtx (GET_MODE (subtarget));
7035 insn = emit_move_insn (target, subtarget);
7036 }
7037
7038 /* Collect the data for the note. */
7039 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7040 vtbl_ref = plus_constant (vtbl_ref,
7041 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7042 /* Discard the initial CONST that was added. */
7043 vtbl_ref = XEXP (vtbl_ref, 0);
7044
7045 REG_NOTES (insn)
7046 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7047
7048 return target;
7049 }
7050
7051 /* Intended for a reference to a buffer of a file-object in Pascal.
7052 But it's not certain that a special tree code will really be
7053 necessary for these. INDIRECT_REF might work for them. */
7054 case BUFFER_REF:
7055 abort ();
7056
7057 case IN_EXPR:
7058 {
7059 /* Pascal set IN expression.
7060
7061 Algorithm:
7062 rlo = set_low - (set_low%bits_per_word);
7063 the_word = set [ (index - rlo)/bits_per_word ];
7064 bit_index = index % bits_per_word;
7065 bitmask = 1 << bit_index;
7066 return !!(the_word & bitmask); */
7067
7068 tree set = TREE_OPERAND (exp, 0);
7069 tree index = TREE_OPERAND (exp, 1);
7070 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7071 tree set_type = TREE_TYPE (set);
7072 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7073 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7074 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7075 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7076 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7077 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7078 rtx setaddr = XEXP (setval, 0);
7079 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7080 rtx rlow;
7081 rtx diff, quo, rem, addr, bit, result;
7082
7083 /* If domain is empty, answer is no. Likewise if index is constant
7084 and out of bounds. */
7085 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7086 && TREE_CODE (set_low_bound) == INTEGER_CST
7087 && tree_int_cst_lt (set_high_bound, set_low_bound))
7088 || (TREE_CODE (index) == INTEGER_CST
7089 && TREE_CODE (set_low_bound) == INTEGER_CST
7090 && tree_int_cst_lt (index, set_low_bound))
7091 || (TREE_CODE (set_high_bound) == INTEGER_CST
7092 && TREE_CODE (index) == INTEGER_CST
7093 && tree_int_cst_lt (set_high_bound, index))))
7094 return const0_rtx;
7095
7096 if (target == 0)
7097 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7098
7099 /* If we get here, we have to generate the code for both cases
7100 (in range and out of range). */
7101
7102 op0 = gen_label_rtx ();
7103 op1 = gen_label_rtx ();
7104
7105 if (! (GET_CODE (index_val) == CONST_INT
7106 && GET_CODE (lo_r) == CONST_INT))
7107 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7108 GET_MODE (index_val), iunsignedp, op1);
7109
7110 if (! (GET_CODE (index_val) == CONST_INT
7111 && GET_CODE (hi_r) == CONST_INT))
7112 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7113 GET_MODE (index_val), iunsignedp, op1);
7114
7115 /* Calculate the element number of bit zero in the first word
7116 of the set. */
7117 if (GET_CODE (lo_r) == CONST_INT)
7118 rlow = GEN_INT (INTVAL (lo_r)
7119 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7120 else
7121 rlow = expand_binop (index_mode, and_optab, lo_r,
7122 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7123 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7124
7125 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7126 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7127
7128 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7129 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7130 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7131 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7132
7133 addr = memory_address (byte_mode,
7134 expand_binop (index_mode, add_optab, diff,
7135 setaddr, NULL_RTX, iunsignedp,
7136 OPTAB_LIB_WIDEN));
7137
7138 /* Extract the bit we want to examine. */
7139 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7140 gen_rtx_MEM (byte_mode, addr),
7141 make_tree (TREE_TYPE (index), rem),
7142 NULL_RTX, 1);
7143 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7144 GET_MODE (target) == byte_mode ? target : 0,
7145 1, OPTAB_LIB_WIDEN);
7146
7147 if (result != target)
7148 convert_move (target, result, 1);
7149
7150 /* Output the code to handle the out-of-range case. */
7151 emit_jump (op0);
7152 emit_label (op1);
7153 emit_move_insn (target, const0_rtx);
7154 emit_label (op0);
7155 return target;
7156 }
7157
7158 case WITH_CLEANUP_EXPR:
7159 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7160 {
7161 WITH_CLEANUP_EXPR_RTL (exp)
7162 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7163 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7164
7165 /* That's it for this cleanup. */
7166 TREE_OPERAND (exp, 1) = 0;
7167 }
7168 return WITH_CLEANUP_EXPR_RTL (exp);
7169
7170 case CLEANUP_POINT_EXPR:
7171 {
7172 /* Start a new binding layer that will keep track of all cleanup
7173 actions to be performed. */
7174 expand_start_bindings (2);
7175
7176 target_temp_slot_level = temp_slot_level;
7177
7178 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7179 /* If we're going to use this value, load it up now. */
7180 if (! ignore)
7181 op0 = force_not_mem (op0);
7182 preserve_temp_slots (op0);
7183 expand_end_bindings (NULL_TREE, 0, 0);
7184 }
7185 return op0;
7186
7187 case CALL_EXPR:
7188 /* Check for a built-in function. */
7189 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7190 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7191 == FUNCTION_DECL)
7192 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7193 {
7194 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7195 == BUILT_IN_FRONTEND)
7196 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7197 else
7198 return expand_builtin (exp, target, subtarget, tmode, ignore);
7199 }
7200
7201 return expand_call (exp, target, ignore);
7202
7203 case NON_LVALUE_EXPR:
7204 case NOP_EXPR:
7205 case CONVERT_EXPR:
7206 case REFERENCE_EXPR:
7207 if (TREE_OPERAND (exp, 0) == error_mark_node)
7208 return const0_rtx;
7209
7210 if (TREE_CODE (type) == UNION_TYPE)
7211 {
7212 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7213
7214 /* If both input and output are BLKmode, this conversion isn't doing
7215 anything except possibly changing memory attribute. */
7216 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7217 {
7218 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7219 modifier);
7220
7221 result = copy_rtx (result);
7222 set_mem_attributes (result, exp, 0);
7223 return result;
7224 }
7225
7226 if (target == 0)
7227 target = assign_temp (type, 0, 1, 1);
7228
7229 if (GET_CODE (target) == MEM)
7230 /* Store data into beginning of memory target. */
7231 store_expr (TREE_OPERAND (exp, 0),
7232 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7233
7234 else if (GET_CODE (target) == REG)
7235 /* Store this field into a union of the proper type. */
7236 store_field (target,
7237 MIN ((int_size_in_bytes (TREE_TYPE
7238 (TREE_OPERAND (exp, 0)))
7239 * BITS_PER_UNIT),
7240 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7241 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7242 VOIDmode, 0, type, 0);
7243 else
7244 abort ();
7245
7246 /* Return the entire union. */
7247 return target;
7248 }
7249
7250 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7251 {
7252 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7253 modifier);
7254
7255 /* If the signedness of the conversion differs and OP0 is
7256 a promoted SUBREG, clear that indication since we now
7257 have to do the proper extension. */
7258 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7259 && GET_CODE (op0) == SUBREG)
7260 SUBREG_PROMOTED_VAR_P (op0) = 0;
7261
7262 return op0;
7263 }
7264
7265 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7266 if (GET_MODE (op0) == mode)
7267 return op0;
7268
7269 /* If OP0 is a constant, just convert it into the proper mode. */
7270 if (CONSTANT_P (op0))
7271 return
7272 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7273 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7274
7275 if (modifier == EXPAND_INITIALIZER)
7276 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7277
7278 if (target == 0)
7279 return
7280 convert_to_mode (mode, op0,
7281 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7282 else
7283 convert_move (target, op0,
7284 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7285 return target;
7286
7287 case VIEW_CONVERT_EXPR:
7288 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7289
7290 /* If the input and output modes are both the same, we are done.
7291 Otherwise, if neither mode is BLKmode and both are within a word, we
7292 can use gen_lowpart. If neither is true, make sure the operand is
7293 in memory and convert the MEM to the new mode. */
7294 if (TYPE_MODE (type) == GET_MODE (op0))
7295 ;
7296 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7297 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7298 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7299 op0 = gen_lowpart (TYPE_MODE (type), op0);
7300 else if (GET_CODE (op0) != MEM)
7301 {
7302 /* If the operand is not a MEM, force it into memory. Since we
7303 are going to be be changing the mode of the MEM, don't call
7304 force_const_mem for constants because we don't allow pool
7305 constants to change mode. */
7306 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7307
7308 if (TREE_ADDRESSABLE (exp))
7309 abort ();
7310
7311 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7312 target
7313 = assign_stack_temp_for_type
7314 (TYPE_MODE (inner_type),
7315 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7316
7317 emit_move_insn (target, op0);
7318 op0 = target;
7319 }
7320
7321 /* At this point, OP0 is in the correct mode. If the output type is such
7322 that the operand is known to be aligned, indicate that it is.
7323 Otherwise, we need only be concerned about alignment for non-BLKmode
7324 results. */
7325 if (GET_CODE (op0) == MEM)
7326 {
7327 op0 = copy_rtx (op0);
7328
7329 if (TYPE_ALIGN_OK (type))
7330 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7331 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7332 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7333 {
7334 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7335 HOST_WIDE_INT temp_size
7336 = MAX (int_size_in_bytes (inner_type),
7337 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7338 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7339 temp_size, 0, type);
7340 rtx new_with_op0_mode = copy_rtx (new);
7341
7342 if (TREE_ADDRESSABLE (exp))
7343 abort ();
7344
7345 PUT_MODE (new_with_op0_mode, GET_MODE (op0));
7346 if (GET_MODE (op0) == BLKmode)
7347 emit_block_move (new_with_op0_mode, op0,
7348 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7349 else
7350 emit_move_insn (new_with_op0_mode, op0);
7351
7352 op0 = new;
7353 }
7354
7355 PUT_MODE (op0, TYPE_MODE (type));
7356 }
7357
7358 return op0;
7359
7360 case PLUS_EXPR:
7361 /* We come here from MINUS_EXPR when the second operand is a
7362 constant. */
7363 plus_expr:
7364 this_optab = ! unsignedp && flag_trapv
7365 && (GET_MODE_CLASS (mode) == MODE_INT)
7366 ? addv_optab : add_optab;
7367
7368 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7369 something else, make sure we add the register to the constant and
7370 then to the other thing. This case can occur during strength
7371 reduction and doing it this way will produce better code if the
7372 frame pointer or argument pointer is eliminated.
7373
7374 fold-const.c will ensure that the constant is always in the inner
7375 PLUS_EXPR, so the only case we need to do anything about is if
7376 sp, ap, or fp is our second argument, in which case we must swap
7377 the innermost first argument and our second argument. */
7378
7379 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7380 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7381 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7382 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7383 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7384 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7385 {
7386 tree t = TREE_OPERAND (exp, 1);
7387
7388 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7389 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7390 }
7391
7392 /* If the result is to be ptr_mode and we are adding an integer to
7393 something, we might be forming a constant. So try to use
7394 plus_constant. If it produces a sum and we can't accept it,
7395 use force_operand. This allows P = &ARR[const] to generate
7396 efficient code on machines where a SYMBOL_REF is not a valid
7397 address.
7398
7399 If this is an EXPAND_SUM call, always return the sum. */
7400 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7401 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7402 {
7403 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7404 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7405 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7406 {
7407 rtx constant_part;
7408
7409 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7410 EXPAND_SUM);
7411 /* Use immed_double_const to ensure that the constant is
7412 truncated according to the mode of OP1, then sign extended
7413 to a HOST_WIDE_INT. Using the constant directly can result
7414 in non-canonical RTL in a 64x32 cross compile. */
7415 constant_part
7416 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7417 (HOST_WIDE_INT) 0,
7418 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7419 op1 = plus_constant (op1, INTVAL (constant_part));
7420 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7421 op1 = force_operand (op1, target);
7422 return op1;
7423 }
7424
7425 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7426 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7427 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7428 {
7429 rtx constant_part;
7430
7431 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7432 EXPAND_SUM);
7433 if (! CONSTANT_P (op0))
7434 {
7435 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7436 VOIDmode, modifier);
7437 /* Don't go to both_summands if modifier
7438 says it's not right to return a PLUS. */
7439 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7440 goto binop2;
7441 goto both_summands;
7442 }
7443 /* Use immed_double_const to ensure that the constant is
7444 truncated according to the mode of OP1, then sign extended
7445 to a HOST_WIDE_INT. Using the constant directly can result
7446 in non-canonical RTL in a 64x32 cross compile. */
7447 constant_part
7448 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7449 (HOST_WIDE_INT) 0,
7450 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7451 op0 = plus_constant (op0, INTVAL (constant_part));
7452 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7453 op0 = force_operand (op0, target);
7454 return op0;
7455 }
7456 }
7457
7458 /* No sense saving up arithmetic to be done
7459 if it's all in the wrong mode to form part of an address.
7460 And force_operand won't know whether to sign-extend or
7461 zero-extend. */
7462 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7463 || mode != ptr_mode)
7464 goto binop;
7465
7466 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7467 subtarget = 0;
7468
7469 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7470 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7471
7472 both_summands:
7473 /* Make sure any term that's a sum with a constant comes last. */
7474 if (GET_CODE (op0) == PLUS
7475 && CONSTANT_P (XEXP (op0, 1)))
7476 {
7477 temp = op0;
7478 op0 = op1;
7479 op1 = temp;
7480 }
7481 /* If adding to a sum including a constant,
7482 associate it to put the constant outside. */
7483 if (GET_CODE (op1) == PLUS
7484 && CONSTANT_P (XEXP (op1, 1)))
7485 {
7486 rtx constant_term = const0_rtx;
7487
7488 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7489 if (temp != 0)
7490 op0 = temp;
7491 /* Ensure that MULT comes first if there is one. */
7492 else if (GET_CODE (op0) == MULT)
7493 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7494 else
7495 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7496
7497 /* Let's also eliminate constants from op0 if possible. */
7498 op0 = eliminate_constant_term (op0, &constant_term);
7499
7500 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7501 their sum should be a constant. Form it into OP1, since the
7502 result we want will then be OP0 + OP1. */
7503
7504 temp = simplify_binary_operation (PLUS, mode, constant_term,
7505 XEXP (op1, 1));
7506 if (temp != 0)
7507 op1 = temp;
7508 else
7509 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7510 }
7511
7512 /* Put a constant term last and put a multiplication first. */
7513 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7514 temp = op1, op1 = op0, op0 = temp;
7515
7516 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7517 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7518
7519 case MINUS_EXPR:
7520 /* For initializers, we are allowed to return a MINUS of two
7521 symbolic constants. Here we handle all cases when both operands
7522 are constant. */
7523 /* Handle difference of two symbolic constants,
7524 for the sake of an initializer. */
7525 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7526 && really_constant_p (TREE_OPERAND (exp, 0))
7527 && really_constant_p (TREE_OPERAND (exp, 1)))
7528 {
7529 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7530 modifier);
7531 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7532 modifier);
7533
7534 /* If the last operand is a CONST_INT, use plus_constant of
7535 the negated constant. Else make the MINUS. */
7536 if (GET_CODE (op1) == CONST_INT)
7537 return plus_constant (op0, - INTVAL (op1));
7538 else
7539 return gen_rtx_MINUS (mode, op0, op1);
7540 }
7541 /* Convert A - const to A + (-const). */
7542 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7543 {
7544 tree negated = fold (build1 (NEGATE_EXPR, type,
7545 TREE_OPERAND (exp, 1)));
7546
7547 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7548 /* If we can't negate the constant in TYPE, leave it alone and
7549 expand_binop will negate it for us. We used to try to do it
7550 here in the signed version of TYPE, but that doesn't work
7551 on POINTER_TYPEs. */;
7552 else
7553 {
7554 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7555 goto plus_expr;
7556 }
7557 }
7558 this_optab = ! unsignedp && flag_trapv
7559 && (GET_MODE_CLASS(mode) == MODE_INT)
7560 ? subv_optab : sub_optab;
7561 goto binop;
7562
7563 case MULT_EXPR:
7564 /* If first operand is constant, swap them.
7565 Thus the following special case checks need only
7566 check the second operand. */
7567 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7568 {
7569 tree t1 = TREE_OPERAND (exp, 0);
7570 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7571 TREE_OPERAND (exp, 1) = t1;
7572 }
7573
7574 /* Attempt to return something suitable for generating an
7575 indexed address, for machines that support that. */
7576
7577 if (modifier == EXPAND_SUM && mode == ptr_mode
7578 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7579 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7580 {
7581 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7582 EXPAND_SUM);
7583
7584 /* Apply distributive law if OP0 is x+c. */
7585 if (GET_CODE (op0) == PLUS
7586 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7587 return
7588 gen_rtx_PLUS
7589 (mode,
7590 gen_rtx_MULT
7591 (mode, XEXP (op0, 0),
7592 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7593 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7594 * INTVAL (XEXP (op0, 1))));
7595
7596 if (GET_CODE (op0) != REG)
7597 op0 = force_operand (op0, NULL_RTX);
7598 if (GET_CODE (op0) != REG)
7599 op0 = copy_to_mode_reg (mode, op0);
7600
7601 return
7602 gen_rtx_MULT (mode, op0,
7603 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7604 }
7605
7606 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7607 subtarget = 0;
7608
7609 /* Check for multiplying things that have been extended
7610 from a narrower type. If this machine supports multiplying
7611 in that narrower type with a result in the desired type,
7612 do it that way, and avoid the explicit type-conversion. */
7613 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7614 && TREE_CODE (type) == INTEGER_TYPE
7615 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7616 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7617 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7618 && int_fits_type_p (TREE_OPERAND (exp, 1),
7619 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7620 /* Don't use a widening multiply if a shift will do. */
7621 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7622 > HOST_BITS_PER_WIDE_INT)
7623 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7624 ||
7625 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7626 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7627 ==
7628 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7629 /* If both operands are extended, they must either both
7630 be zero-extended or both be sign-extended. */
7631 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7632 ==
7633 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7634 {
7635 enum machine_mode innermode
7636 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7637 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7638 ? smul_widen_optab : umul_widen_optab);
7639 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7640 ? umul_widen_optab : smul_widen_optab);
7641 if (mode == GET_MODE_WIDER_MODE (innermode))
7642 {
7643 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7644 {
7645 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7646 NULL_RTX, VOIDmode, 0);
7647 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7648 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7649 VOIDmode, 0);
7650 else
7651 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7652 NULL_RTX, VOIDmode, 0);
7653 goto binop2;
7654 }
7655 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7656 && innermode == word_mode)
7657 {
7658 rtx htem;
7659 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7660 NULL_RTX, VOIDmode, 0);
7661 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7662 op1 = convert_modes (innermode, mode,
7663 expand_expr (TREE_OPERAND (exp, 1),
7664 NULL_RTX, VOIDmode, 0),
7665 unsignedp);
7666 else
7667 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7668 NULL_RTX, VOIDmode, 0);
7669 temp = expand_binop (mode, other_optab, op0, op1, target,
7670 unsignedp, OPTAB_LIB_WIDEN);
7671 htem = expand_mult_highpart_adjust (innermode,
7672 gen_highpart (innermode, temp),
7673 op0, op1,
7674 gen_highpart (innermode, temp),
7675 unsignedp);
7676 emit_move_insn (gen_highpart (innermode, temp), htem);
7677 return temp;
7678 }
7679 }
7680 }
7681 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7682 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7683 return expand_mult (mode, op0, op1, target, unsignedp);
7684
7685 case TRUNC_DIV_EXPR:
7686 case FLOOR_DIV_EXPR:
7687 case CEIL_DIV_EXPR:
7688 case ROUND_DIV_EXPR:
7689 case EXACT_DIV_EXPR:
7690 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7691 subtarget = 0;
7692 /* Possible optimization: compute the dividend with EXPAND_SUM
7693 then if the divisor is constant can optimize the case
7694 where some terms of the dividend have coeffs divisible by it. */
7695 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7696 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7697 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7698
7699 case RDIV_EXPR:
7700 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7701 expensive divide. If not, combine will rebuild the original
7702 computation. */
7703 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7704 && !real_onep (TREE_OPERAND (exp, 0)))
7705 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7706 build (RDIV_EXPR, type,
7707 build_real (type, dconst1),
7708 TREE_OPERAND (exp, 1))),
7709 target, tmode, unsignedp);
7710 this_optab = sdiv_optab;
7711 goto binop;
7712
7713 case TRUNC_MOD_EXPR:
7714 case FLOOR_MOD_EXPR:
7715 case CEIL_MOD_EXPR:
7716 case ROUND_MOD_EXPR:
7717 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7718 subtarget = 0;
7719 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7720 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7721 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7722
7723 case FIX_ROUND_EXPR:
7724 case FIX_FLOOR_EXPR:
7725 case FIX_CEIL_EXPR:
7726 abort (); /* Not used for C. */
7727
7728 case FIX_TRUNC_EXPR:
7729 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7730 if (target == 0)
7731 target = gen_reg_rtx (mode);
7732 expand_fix (target, op0, unsignedp);
7733 return target;
7734
7735 case FLOAT_EXPR:
7736 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7737 if (target == 0)
7738 target = gen_reg_rtx (mode);
7739 /* expand_float can't figure out what to do if FROM has VOIDmode.
7740 So give it the correct mode. With -O, cse will optimize this. */
7741 if (GET_MODE (op0) == VOIDmode)
7742 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7743 op0);
7744 expand_float (target, op0,
7745 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7746 return target;
7747
7748 case NEGATE_EXPR:
7749 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7750 temp = expand_unop (mode,
7751 ! unsignedp && flag_trapv
7752 && (GET_MODE_CLASS(mode) == MODE_INT)
7753 ? negv_optab : neg_optab, op0, target, 0);
7754 if (temp == 0)
7755 abort ();
7756 return temp;
7757
7758 case ABS_EXPR:
7759 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7760
7761 /* Handle complex values specially. */
7762 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7763 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7764 return expand_complex_abs (mode, op0, target, unsignedp);
7765
7766 /* Unsigned abs is simply the operand. Testing here means we don't
7767 risk generating incorrect code below. */
7768 if (TREE_UNSIGNED (type))
7769 return op0;
7770
7771 return expand_abs (mode, op0, target, unsignedp,
7772 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7773
7774 case MAX_EXPR:
7775 case MIN_EXPR:
7776 target = original_target;
7777 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7778 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7779 || GET_MODE (target) != mode
7780 || (GET_CODE (target) == REG
7781 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7782 target = gen_reg_rtx (mode);
7783 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7784 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7785
7786 /* First try to do it with a special MIN or MAX instruction.
7787 If that does not win, use a conditional jump to select the proper
7788 value. */
7789 this_optab = (TREE_UNSIGNED (type)
7790 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7791 : (code == MIN_EXPR ? smin_optab : smax_optab));
7792
7793 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7794 OPTAB_WIDEN);
7795 if (temp != 0)
7796 return temp;
7797
7798 /* At this point, a MEM target is no longer useful; we will get better
7799 code without it. */
7800
7801 if (GET_CODE (target) == MEM)
7802 target = gen_reg_rtx (mode);
7803
7804 if (target != op0)
7805 emit_move_insn (target, op0);
7806
7807 op0 = gen_label_rtx ();
7808
7809 /* If this mode is an integer too wide to compare properly,
7810 compare word by word. Rely on cse to optimize constant cases. */
7811 if (GET_MODE_CLASS (mode) == MODE_INT
7812 && ! can_compare_p (GE, mode, ccp_jump))
7813 {
7814 if (code == MAX_EXPR)
7815 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7816 target, op1, NULL_RTX, op0);
7817 else
7818 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7819 op1, target, NULL_RTX, op0);
7820 }
7821 else
7822 {
7823 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7824 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7825 unsignedp, mode, NULL_RTX, NULL_RTX,
7826 op0);
7827 }
7828 emit_move_insn (target, op1);
7829 emit_label (op0);
7830 return target;
7831
7832 case BIT_NOT_EXPR:
7833 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7834 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7835 if (temp == 0)
7836 abort ();
7837 return temp;
7838
7839 case FFS_EXPR:
7840 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7841 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7842 if (temp == 0)
7843 abort ();
7844 return temp;
7845
7846 /* ??? Can optimize bitwise operations with one arg constant.
7847 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7848 and (a bitwise1 b) bitwise2 b (etc)
7849 but that is probably not worth while. */
7850
7851 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7852 boolean values when we want in all cases to compute both of them. In
7853 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7854 as actual zero-or-1 values and then bitwise anding. In cases where
7855 there cannot be any side effects, better code would be made by
7856 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7857 how to recognize those cases. */
7858
7859 case TRUTH_AND_EXPR:
7860 case BIT_AND_EXPR:
7861 this_optab = and_optab;
7862 goto binop;
7863
7864 case TRUTH_OR_EXPR:
7865 case BIT_IOR_EXPR:
7866 this_optab = ior_optab;
7867 goto binop;
7868
7869 case TRUTH_XOR_EXPR:
7870 case BIT_XOR_EXPR:
7871 this_optab = xor_optab;
7872 goto binop;
7873
7874 case LSHIFT_EXPR:
7875 case RSHIFT_EXPR:
7876 case LROTATE_EXPR:
7877 case RROTATE_EXPR:
7878 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7879 subtarget = 0;
7880 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7881 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7882 unsignedp);
7883
7884 /* Could determine the answer when only additive constants differ. Also,
7885 the addition of one can be handled by changing the condition. */
7886 case LT_EXPR:
7887 case LE_EXPR:
7888 case GT_EXPR:
7889 case GE_EXPR:
7890 case EQ_EXPR:
7891 case NE_EXPR:
7892 case UNORDERED_EXPR:
7893 case ORDERED_EXPR:
7894 case UNLT_EXPR:
7895 case UNLE_EXPR:
7896 case UNGT_EXPR:
7897 case UNGE_EXPR:
7898 case UNEQ_EXPR:
7899 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7900 if (temp != 0)
7901 return temp;
7902
7903 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7904 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7905 && original_target
7906 && GET_CODE (original_target) == REG
7907 && (GET_MODE (original_target)
7908 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7909 {
7910 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7911 VOIDmode, 0);
7912
7913 if (temp != original_target)
7914 temp = copy_to_reg (temp);
7915
7916 op1 = gen_label_rtx ();
7917 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7918 GET_MODE (temp), unsignedp, op1);
7919 emit_move_insn (temp, const1_rtx);
7920 emit_label (op1);
7921 return temp;
7922 }
7923
7924 /* If no set-flag instruction, must generate a conditional
7925 store into a temporary variable. Drop through
7926 and handle this like && and ||. */
7927
7928 case TRUTH_ANDIF_EXPR:
7929 case TRUTH_ORIF_EXPR:
7930 if (! ignore
7931 && (target == 0 || ! safe_from_p (target, exp, 1)
7932 /* Make sure we don't have a hard reg (such as function's return
7933 value) live across basic blocks, if not optimizing. */
7934 || (!optimize && GET_CODE (target) == REG
7935 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7936 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7937
7938 if (target)
7939 emit_clr_insn (target);
7940
7941 op1 = gen_label_rtx ();
7942 jumpifnot (exp, op1);
7943
7944 if (target)
7945 emit_0_to_1_insn (target);
7946
7947 emit_label (op1);
7948 return ignore ? const0_rtx : target;
7949
7950 case TRUTH_NOT_EXPR:
7951 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7952 /* The parser is careful to generate TRUTH_NOT_EXPR
7953 only with operands that are always zero or one. */
7954 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7955 target, 1, OPTAB_LIB_WIDEN);
7956 if (temp == 0)
7957 abort ();
7958 return temp;
7959
7960 case COMPOUND_EXPR:
7961 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7962 emit_queue ();
7963 return expand_expr (TREE_OPERAND (exp, 1),
7964 (ignore ? const0_rtx : target),
7965 VOIDmode, 0);
7966
7967 case COND_EXPR:
7968 /* If we would have a "singleton" (see below) were it not for a
7969 conversion in each arm, bring that conversion back out. */
7970 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7971 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7972 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7973 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7974 {
7975 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7976 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7977
7978 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7979 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7980 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7981 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7982 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7983 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7984 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7985 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7986 return expand_expr (build1 (NOP_EXPR, type,
7987 build (COND_EXPR, TREE_TYPE (iftrue),
7988 TREE_OPERAND (exp, 0),
7989 iftrue, iffalse)),
7990 target, tmode, modifier);
7991 }
7992
7993 {
7994 /* Note that COND_EXPRs whose type is a structure or union
7995 are required to be constructed to contain assignments of
7996 a temporary variable, so that we can evaluate them here
7997 for side effect only. If type is void, we must do likewise. */
7998
7999 /* If an arm of the branch requires a cleanup,
8000 only that cleanup is performed. */
8001
8002 tree singleton = 0;
8003 tree binary_op = 0, unary_op = 0;
8004
8005 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8006 convert it to our mode, if necessary. */
8007 if (integer_onep (TREE_OPERAND (exp, 1))
8008 && integer_zerop (TREE_OPERAND (exp, 2))
8009 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8010 {
8011 if (ignore)
8012 {
8013 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8014 modifier);
8015 return const0_rtx;
8016 }
8017
8018 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8019 if (GET_MODE (op0) == mode)
8020 return op0;
8021
8022 if (target == 0)
8023 target = gen_reg_rtx (mode);
8024 convert_move (target, op0, unsignedp);
8025 return target;
8026 }
8027
8028 /* Check for X ? A + B : A. If we have this, we can copy A to the
8029 output and conditionally add B. Similarly for unary operations.
8030 Don't do this if X has side-effects because those side effects
8031 might affect A or B and the "?" operation is a sequence point in
8032 ANSI. (operand_equal_p tests for side effects.) */
8033
8034 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8035 && operand_equal_p (TREE_OPERAND (exp, 2),
8036 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8037 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8038 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8039 && operand_equal_p (TREE_OPERAND (exp, 1),
8040 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8041 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8042 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8043 && operand_equal_p (TREE_OPERAND (exp, 2),
8044 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8045 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8046 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8047 && operand_equal_p (TREE_OPERAND (exp, 1),
8048 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8049 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8050
8051 /* If we are not to produce a result, we have no target. Otherwise,
8052 if a target was specified use it; it will not be used as an
8053 intermediate target unless it is safe. If no target, use a
8054 temporary. */
8055
8056 if (ignore)
8057 temp = 0;
8058 else if (original_target
8059 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8060 || (singleton && GET_CODE (original_target) == REG
8061 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8062 && original_target == var_rtx (singleton)))
8063 && GET_MODE (original_target) == mode
8064 #ifdef HAVE_conditional_move
8065 && (! can_conditionally_move_p (mode)
8066 || GET_CODE (original_target) == REG
8067 || TREE_ADDRESSABLE (type))
8068 #endif
8069 && (GET_CODE (original_target) != MEM
8070 || TREE_ADDRESSABLE (type)))
8071 temp = original_target;
8072 else if (TREE_ADDRESSABLE (type))
8073 abort ();
8074 else
8075 temp = assign_temp (type, 0, 0, 1);
8076
8077 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8078 do the test of X as a store-flag operation, do this as
8079 A + ((X != 0) << log C). Similarly for other simple binary
8080 operators. Only do for C == 1 if BRANCH_COST is low. */
8081 if (temp && singleton && binary_op
8082 && (TREE_CODE (binary_op) == PLUS_EXPR
8083 || TREE_CODE (binary_op) == MINUS_EXPR
8084 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8085 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8086 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8087 : integer_onep (TREE_OPERAND (binary_op, 1)))
8088 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8089 {
8090 rtx result;
8091 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8092 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8093 ? addv_optab : add_optab)
8094 : TREE_CODE (binary_op) == MINUS_EXPR
8095 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8096 ? subv_optab : sub_optab)
8097 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8098 : xor_optab);
8099
8100 /* If we had X ? A : A + 1, do this as A + (X == 0).
8101
8102 We have to invert the truth value here and then put it
8103 back later if do_store_flag fails. We cannot simply copy
8104 TREE_OPERAND (exp, 0) to another variable and modify that
8105 because invert_truthvalue can modify the tree pointed to
8106 by its argument. */
8107 if (singleton == TREE_OPERAND (exp, 1))
8108 TREE_OPERAND (exp, 0)
8109 = invert_truthvalue (TREE_OPERAND (exp, 0));
8110
8111 result = do_store_flag (TREE_OPERAND (exp, 0),
8112 (safe_from_p (temp, singleton, 1)
8113 ? temp : NULL_RTX),
8114 mode, BRANCH_COST <= 1);
8115
8116 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8117 result = expand_shift (LSHIFT_EXPR, mode, result,
8118 build_int_2 (tree_log2
8119 (TREE_OPERAND
8120 (binary_op, 1)),
8121 0),
8122 (safe_from_p (temp, singleton, 1)
8123 ? temp : NULL_RTX), 0);
8124
8125 if (result)
8126 {
8127 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8128 return expand_binop (mode, boptab, op1, result, temp,
8129 unsignedp, OPTAB_LIB_WIDEN);
8130 }
8131 else if (singleton == TREE_OPERAND (exp, 1))
8132 TREE_OPERAND (exp, 0)
8133 = invert_truthvalue (TREE_OPERAND (exp, 0));
8134 }
8135
8136 do_pending_stack_adjust ();
8137 NO_DEFER_POP;
8138 op0 = gen_label_rtx ();
8139
8140 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8141 {
8142 if (temp != 0)
8143 {
8144 /* If the target conflicts with the other operand of the
8145 binary op, we can't use it. Also, we can't use the target
8146 if it is a hard register, because evaluating the condition
8147 might clobber it. */
8148 if ((binary_op
8149 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8150 || (GET_CODE (temp) == REG
8151 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8152 temp = gen_reg_rtx (mode);
8153 store_expr (singleton, temp, 0);
8154 }
8155 else
8156 expand_expr (singleton,
8157 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8158 if (singleton == TREE_OPERAND (exp, 1))
8159 jumpif (TREE_OPERAND (exp, 0), op0);
8160 else
8161 jumpifnot (TREE_OPERAND (exp, 0), op0);
8162
8163 start_cleanup_deferral ();
8164 if (binary_op && temp == 0)
8165 /* Just touch the other operand. */
8166 expand_expr (TREE_OPERAND (binary_op, 1),
8167 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8168 else if (binary_op)
8169 store_expr (build (TREE_CODE (binary_op), type,
8170 make_tree (type, temp),
8171 TREE_OPERAND (binary_op, 1)),
8172 temp, 0);
8173 else
8174 store_expr (build1 (TREE_CODE (unary_op), type,
8175 make_tree (type, temp)),
8176 temp, 0);
8177 op1 = op0;
8178 }
8179 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8180 comparison operator. If we have one of these cases, set the
8181 output to A, branch on A (cse will merge these two references),
8182 then set the output to FOO. */
8183 else if (temp
8184 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8185 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8186 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8187 TREE_OPERAND (exp, 1), 0)
8188 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8189 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8190 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8191 {
8192 if (GET_CODE (temp) == REG
8193 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8194 temp = gen_reg_rtx (mode);
8195 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8196 jumpif (TREE_OPERAND (exp, 0), op0);
8197
8198 start_cleanup_deferral ();
8199 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8200 op1 = op0;
8201 }
8202 else if (temp
8203 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8204 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8205 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8206 TREE_OPERAND (exp, 2), 0)
8207 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8208 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8209 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8210 {
8211 if (GET_CODE (temp) == REG
8212 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8213 temp = gen_reg_rtx (mode);
8214 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8215 jumpifnot (TREE_OPERAND (exp, 0), op0);
8216
8217 start_cleanup_deferral ();
8218 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8219 op1 = op0;
8220 }
8221 else
8222 {
8223 op1 = gen_label_rtx ();
8224 jumpifnot (TREE_OPERAND (exp, 0), op0);
8225
8226 start_cleanup_deferral ();
8227
8228 /* One branch of the cond can be void, if it never returns. For
8229 example A ? throw : E */
8230 if (temp != 0
8231 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8232 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8233 else
8234 expand_expr (TREE_OPERAND (exp, 1),
8235 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8236 end_cleanup_deferral ();
8237 emit_queue ();
8238 emit_jump_insn (gen_jump (op1));
8239 emit_barrier ();
8240 emit_label (op0);
8241 start_cleanup_deferral ();
8242 if (temp != 0
8243 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8244 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8245 else
8246 expand_expr (TREE_OPERAND (exp, 2),
8247 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8248 }
8249
8250 end_cleanup_deferral ();
8251
8252 emit_queue ();
8253 emit_label (op1);
8254 OK_DEFER_POP;
8255
8256 return temp;
8257 }
8258
8259 case TARGET_EXPR:
8260 {
8261 /* Something needs to be initialized, but we didn't know
8262 where that thing was when building the tree. For example,
8263 it could be the return value of a function, or a parameter
8264 to a function which lays down in the stack, or a temporary
8265 variable which must be passed by reference.
8266
8267 We guarantee that the expression will either be constructed
8268 or copied into our original target. */
8269
8270 tree slot = TREE_OPERAND (exp, 0);
8271 tree cleanups = NULL_TREE;
8272 tree exp1;
8273
8274 if (TREE_CODE (slot) != VAR_DECL)
8275 abort ();
8276
8277 if (! ignore)
8278 target = original_target;
8279
8280 /* Set this here so that if we get a target that refers to a
8281 register variable that's already been used, put_reg_into_stack
8282 knows that it should fix up those uses. */
8283 TREE_USED (slot) = 1;
8284
8285 if (target == 0)
8286 {
8287 if (DECL_RTL_SET_P (slot))
8288 {
8289 target = DECL_RTL (slot);
8290 /* If we have already expanded the slot, so don't do
8291 it again. (mrs) */
8292 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8293 return target;
8294 }
8295 else
8296 {
8297 target = assign_temp (type, 2, 0, 1);
8298 /* All temp slots at this level must not conflict. */
8299 preserve_temp_slots (target);
8300 SET_DECL_RTL (slot, target);
8301 if (TREE_ADDRESSABLE (slot))
8302 put_var_into_stack (slot);
8303
8304 /* Since SLOT is not known to the called function
8305 to belong to its stack frame, we must build an explicit
8306 cleanup. This case occurs when we must build up a reference
8307 to pass the reference as an argument. In this case,
8308 it is very likely that such a reference need not be
8309 built here. */
8310
8311 if (TREE_OPERAND (exp, 2) == 0)
8312 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8313 cleanups = TREE_OPERAND (exp, 2);
8314 }
8315 }
8316 else
8317 {
8318 /* This case does occur, when expanding a parameter which
8319 needs to be constructed on the stack. The target
8320 is the actual stack address that we want to initialize.
8321 The function we call will perform the cleanup in this case. */
8322
8323 /* If we have already assigned it space, use that space,
8324 not target that we were passed in, as our target
8325 parameter is only a hint. */
8326 if (DECL_RTL_SET_P (slot))
8327 {
8328 target = DECL_RTL (slot);
8329 /* If we have already expanded the slot, so don't do
8330 it again. (mrs) */
8331 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8332 return target;
8333 }
8334 else
8335 {
8336 SET_DECL_RTL (slot, target);
8337 /* If we must have an addressable slot, then make sure that
8338 the RTL that we just stored in slot is OK. */
8339 if (TREE_ADDRESSABLE (slot))
8340 put_var_into_stack (slot);
8341 }
8342 }
8343
8344 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8345 /* Mark it as expanded. */
8346 TREE_OPERAND (exp, 1) = NULL_TREE;
8347
8348 store_expr (exp1, target, 0);
8349
8350 expand_decl_cleanup (NULL_TREE, cleanups);
8351
8352 return target;
8353 }
8354
8355 case INIT_EXPR:
8356 {
8357 tree lhs = TREE_OPERAND (exp, 0);
8358 tree rhs = TREE_OPERAND (exp, 1);
8359
8360 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8361 return temp;
8362 }
8363
8364 case MODIFY_EXPR:
8365 {
8366 /* If lhs is complex, expand calls in rhs before computing it.
8367 That's so we don't compute a pointer and save it over a
8368 call. If lhs is simple, compute it first so we can give it
8369 as a target if the rhs is just a call. This avoids an
8370 extra temp and copy and that prevents a partial-subsumption
8371 which makes bad code. Actually we could treat
8372 component_ref's of vars like vars. */
8373
8374 tree lhs = TREE_OPERAND (exp, 0);
8375 tree rhs = TREE_OPERAND (exp, 1);
8376
8377 temp = 0;
8378
8379 /* Check for |= or &= of a bitfield of size one into another bitfield
8380 of size 1. In this case, (unless we need the result of the
8381 assignment) we can do this more efficiently with a
8382 test followed by an assignment, if necessary.
8383
8384 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8385 things change so we do, this code should be enhanced to
8386 support it. */
8387 if (ignore
8388 && TREE_CODE (lhs) == COMPONENT_REF
8389 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8390 || TREE_CODE (rhs) == BIT_AND_EXPR)
8391 && TREE_OPERAND (rhs, 0) == lhs
8392 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8393 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8394 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8395 {
8396 rtx label = gen_label_rtx ();
8397
8398 do_jump (TREE_OPERAND (rhs, 1),
8399 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8400 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8401 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8402 (TREE_CODE (rhs) == BIT_IOR_EXPR
8403 ? integer_one_node
8404 : integer_zero_node)),
8405 0, 0);
8406 do_pending_stack_adjust ();
8407 emit_label (label);
8408 return const0_rtx;
8409 }
8410
8411 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8412
8413 return temp;
8414 }
8415
8416 case RETURN_EXPR:
8417 if (!TREE_OPERAND (exp, 0))
8418 expand_null_return ();
8419 else
8420 expand_return (TREE_OPERAND (exp, 0));
8421 return const0_rtx;
8422
8423 case PREINCREMENT_EXPR:
8424 case PREDECREMENT_EXPR:
8425 return expand_increment (exp, 0, ignore);
8426
8427 case POSTINCREMENT_EXPR:
8428 case POSTDECREMENT_EXPR:
8429 /* Faster to treat as pre-increment if result is not used. */
8430 return expand_increment (exp, ! ignore, ignore);
8431
8432 case ADDR_EXPR:
8433 /* Are we taking the address of a nested function? */
8434 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8435 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8436 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8437 && ! TREE_STATIC (exp))
8438 {
8439 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8440 op0 = force_operand (op0, target);
8441 }
8442 /* If we are taking the address of something erroneous, just
8443 return a zero. */
8444 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8445 return const0_rtx;
8446 /* If we are taking the address of a constant and are at the
8447 top level, we have to use output_constant_def since we can't
8448 call force_const_mem at top level. */
8449 else if (cfun == 0
8450 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8451 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8452 == 'c')))
8453 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8454 else
8455 {
8456 /* We make sure to pass const0_rtx down if we came in with
8457 ignore set, to avoid doing the cleanups twice for something. */
8458 op0 = expand_expr (TREE_OPERAND (exp, 0),
8459 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8460 (modifier == EXPAND_INITIALIZER
8461 ? modifier : EXPAND_CONST_ADDRESS));
8462
8463 /* If we are going to ignore the result, OP0 will have been set
8464 to const0_rtx, so just return it. Don't get confused and
8465 think we are taking the address of the constant. */
8466 if (ignore)
8467 return op0;
8468
8469 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8470 clever and returns a REG when given a MEM. */
8471 op0 = protect_from_queue (op0, 1);
8472
8473 /* We would like the object in memory. If it is a constant, we can
8474 have it be statically allocated into memory. For a non-constant,
8475 we need to allocate some memory and store the value into it. */
8476
8477 if (CONSTANT_P (op0))
8478 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8479 op0);
8480 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8481 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8482 || GET_CODE (op0) == PARALLEL)
8483 {
8484 /* If this object is in a register, it must can't be BLKmode. */
8485 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8486 tree nt = build_qualified_type (inner_type,
8487 (TYPE_QUALS (inner_type)
8488 | TYPE_QUAL_CONST));
8489 rtx memloc = assign_temp (nt, 1, 1, 1);
8490
8491 if (GET_CODE (op0) == PARALLEL)
8492 /* Handle calls that pass values in multiple non-contiguous
8493 locations. The Irix 6 ABI has examples of this. */
8494 emit_group_store (memloc, op0, int_size_in_bytes (inner_type));
8495 else
8496 emit_move_insn (memloc, op0);
8497
8498 op0 = memloc;
8499 }
8500
8501 if (GET_CODE (op0) != MEM)
8502 abort ();
8503
8504 mark_temp_addr_taken (op0);
8505 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8506 {
8507 op0 = XEXP (op0, 0);
8508 #ifdef POINTERS_EXTEND_UNSIGNED
8509 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8510 && mode == ptr_mode)
8511 op0 = convert_memory_address (ptr_mode, op0);
8512 #endif
8513 return op0;
8514 }
8515
8516 /* If OP0 is not aligned as least as much as the type requires, we
8517 need to make a temporary, copy OP0 to it, and take the address of
8518 the temporary. We want to use the alignment of the type, not of
8519 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8520 the test for BLKmode means that can't happen. The test for
8521 BLKmode is because we never make mis-aligned MEMs with
8522 non-BLKmode.
8523
8524 We don't need to do this at all if the machine doesn't have
8525 strict alignment. */
8526 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8527 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8528 > MEM_ALIGN (op0))
8529 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8530 {
8531 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8532 rtx new
8533 = assign_stack_temp_for_type
8534 (TYPE_MODE (inner_type),
8535 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8536 : int_size_in_bytes (inner_type),
8537 1, build_qualified_type (inner_type,
8538 (TYPE_QUALS (inner_type)
8539 | TYPE_QUAL_CONST)));
8540
8541 if (TYPE_ALIGN_OK (inner_type))
8542 abort ();
8543
8544 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8545 op0 = new;
8546 }
8547
8548 op0 = force_operand (XEXP (op0, 0), target);
8549 }
8550
8551 if (flag_force_addr
8552 && GET_CODE (op0) != REG
8553 && modifier != EXPAND_CONST_ADDRESS
8554 && modifier != EXPAND_INITIALIZER
8555 && modifier != EXPAND_SUM)
8556 op0 = force_reg (Pmode, op0);
8557
8558 if (GET_CODE (op0) == REG
8559 && ! REG_USERVAR_P (op0))
8560 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8561
8562 #ifdef POINTERS_EXTEND_UNSIGNED
8563 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8564 && mode == ptr_mode)
8565 op0 = convert_memory_address (ptr_mode, op0);
8566 #endif
8567
8568 return op0;
8569
8570 case ENTRY_VALUE_EXPR:
8571 abort ();
8572
8573 /* COMPLEX type for Extended Pascal & Fortran */
8574 case COMPLEX_EXPR:
8575 {
8576 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8577 rtx insns;
8578
8579 /* Get the rtx code of the operands. */
8580 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8581 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8582
8583 if (! target)
8584 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8585
8586 start_sequence ();
8587
8588 /* Move the real (op0) and imaginary (op1) parts to their location. */
8589 emit_move_insn (gen_realpart (mode, target), op0);
8590 emit_move_insn (gen_imagpart (mode, target), op1);
8591
8592 insns = get_insns ();
8593 end_sequence ();
8594
8595 /* Complex construction should appear as a single unit. */
8596 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8597 each with a separate pseudo as destination.
8598 It's not correct for flow to treat them as a unit. */
8599 if (GET_CODE (target) != CONCAT)
8600 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8601 else
8602 emit_insns (insns);
8603
8604 return target;
8605 }
8606
8607 case REALPART_EXPR:
8608 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8609 return gen_realpart (mode, op0);
8610
8611 case IMAGPART_EXPR:
8612 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8613 return gen_imagpart (mode, op0);
8614
8615 case CONJ_EXPR:
8616 {
8617 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8618 rtx imag_t;
8619 rtx insns;
8620
8621 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8622
8623 if (! target)
8624 target = gen_reg_rtx (mode);
8625
8626 start_sequence ();
8627
8628 /* Store the realpart and the negated imagpart to target. */
8629 emit_move_insn (gen_realpart (partmode, target),
8630 gen_realpart (partmode, op0));
8631
8632 imag_t = gen_imagpart (partmode, target);
8633 temp = expand_unop (partmode,
8634 ! unsignedp && flag_trapv
8635 && (GET_MODE_CLASS(partmode) == MODE_INT)
8636 ? negv_optab : neg_optab,
8637 gen_imagpart (partmode, op0), imag_t, 0);
8638 if (temp != imag_t)
8639 emit_move_insn (imag_t, temp);
8640
8641 insns = get_insns ();
8642 end_sequence ();
8643
8644 /* Conjugate should appear as a single unit
8645 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8646 each with a separate pseudo as destination.
8647 It's not correct for flow to treat them as a unit. */
8648 if (GET_CODE (target) != CONCAT)
8649 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8650 else
8651 emit_insns (insns);
8652
8653 return target;
8654 }
8655
8656 case TRY_CATCH_EXPR:
8657 {
8658 tree handler = TREE_OPERAND (exp, 1);
8659
8660 expand_eh_region_start ();
8661
8662 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8663
8664 expand_eh_region_end_cleanup (handler);
8665
8666 return op0;
8667 }
8668
8669 case TRY_FINALLY_EXPR:
8670 {
8671 tree try_block = TREE_OPERAND (exp, 0);
8672 tree finally_block = TREE_OPERAND (exp, 1);
8673 rtx finally_label = gen_label_rtx ();
8674 rtx done_label = gen_label_rtx ();
8675 rtx return_link = gen_reg_rtx (Pmode);
8676 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8677 (tree) finally_label, (tree) return_link);
8678 TREE_SIDE_EFFECTS (cleanup) = 1;
8679
8680 /* Start a new binding layer that will keep track of all cleanup
8681 actions to be performed. */
8682 expand_start_bindings (2);
8683
8684 target_temp_slot_level = temp_slot_level;
8685
8686 expand_decl_cleanup (NULL_TREE, cleanup);
8687 op0 = expand_expr (try_block, target, tmode, modifier);
8688
8689 preserve_temp_slots (op0);
8690 expand_end_bindings (NULL_TREE, 0, 0);
8691 emit_jump (done_label);
8692 emit_label (finally_label);
8693 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8694 emit_indirect_jump (return_link);
8695 emit_label (done_label);
8696 return op0;
8697 }
8698
8699 case GOTO_SUBROUTINE_EXPR:
8700 {
8701 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8702 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8703 rtx return_address = gen_label_rtx ();
8704 emit_move_insn (return_link,
8705 gen_rtx_LABEL_REF (Pmode, return_address));
8706 emit_jump (subr);
8707 emit_label (return_address);
8708 return const0_rtx;
8709 }
8710
8711 case VA_ARG_EXPR:
8712 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8713
8714 case EXC_PTR_EXPR:
8715 return get_exception_pointer (cfun);
8716
8717 case FDESC_EXPR:
8718 /* Function descriptors are not valid except for as
8719 initialization constants, and should not be expanded. */
8720 abort ();
8721
8722 default:
8723 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8724 }
8725
8726 /* Here to do an ordinary binary operator, generating an instruction
8727 from the optab already placed in `this_optab'. */
8728 binop:
8729 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8730 subtarget = 0;
8731 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8732 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8733 binop2:
8734 temp = expand_binop (mode, this_optab, op0, op1, target,
8735 unsignedp, OPTAB_LIB_WIDEN);
8736 if (temp == 0)
8737 abort ();
8738 return temp;
8739 }
8740 \f
8741 /* Return the tree node if a ARG corresponds to a string constant or zero
8742 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8743 in bytes within the string that ARG is accessing. The type of the
8744 offset will be `sizetype'. */
8745
8746 tree
8747 string_constant (arg, ptr_offset)
8748 tree arg;
8749 tree *ptr_offset;
8750 {
8751 STRIP_NOPS (arg);
8752
8753 if (TREE_CODE (arg) == ADDR_EXPR
8754 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8755 {
8756 *ptr_offset = size_zero_node;
8757 return TREE_OPERAND (arg, 0);
8758 }
8759 else if (TREE_CODE (arg) == PLUS_EXPR)
8760 {
8761 tree arg0 = TREE_OPERAND (arg, 0);
8762 tree arg1 = TREE_OPERAND (arg, 1);
8763
8764 STRIP_NOPS (arg0);
8765 STRIP_NOPS (arg1);
8766
8767 if (TREE_CODE (arg0) == ADDR_EXPR
8768 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8769 {
8770 *ptr_offset = convert (sizetype, arg1);
8771 return TREE_OPERAND (arg0, 0);
8772 }
8773 else if (TREE_CODE (arg1) == ADDR_EXPR
8774 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8775 {
8776 *ptr_offset = convert (sizetype, arg0);
8777 return TREE_OPERAND (arg1, 0);
8778 }
8779 }
8780
8781 return 0;
8782 }
8783 \f
8784 /* Expand code for a post- or pre- increment or decrement
8785 and return the RTX for the result.
8786 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8787
8788 static rtx
8789 expand_increment (exp, post, ignore)
8790 tree exp;
8791 int post, ignore;
8792 {
8793 rtx op0, op1;
8794 rtx temp, value;
8795 tree incremented = TREE_OPERAND (exp, 0);
8796 optab this_optab = add_optab;
8797 int icode;
8798 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8799 int op0_is_copy = 0;
8800 int single_insn = 0;
8801 /* 1 means we can't store into OP0 directly,
8802 because it is a subreg narrower than a word,
8803 and we don't dare clobber the rest of the word. */
8804 int bad_subreg = 0;
8805
8806 /* Stabilize any component ref that might need to be
8807 evaluated more than once below. */
8808 if (!post
8809 || TREE_CODE (incremented) == BIT_FIELD_REF
8810 || (TREE_CODE (incremented) == COMPONENT_REF
8811 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8812 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8813 incremented = stabilize_reference (incremented);
8814 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8815 ones into save exprs so that they don't accidentally get evaluated
8816 more than once by the code below. */
8817 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8818 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8819 incremented = save_expr (incremented);
8820
8821 /* Compute the operands as RTX.
8822 Note whether OP0 is the actual lvalue or a copy of it:
8823 I believe it is a copy iff it is a register or subreg
8824 and insns were generated in computing it. */
8825
8826 temp = get_last_insn ();
8827 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8828
8829 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8830 in place but instead must do sign- or zero-extension during assignment,
8831 so we copy it into a new register and let the code below use it as
8832 a copy.
8833
8834 Note that we can safely modify this SUBREG since it is know not to be
8835 shared (it was made by the expand_expr call above). */
8836
8837 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8838 {
8839 if (post)
8840 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8841 else
8842 bad_subreg = 1;
8843 }
8844 else if (GET_CODE (op0) == SUBREG
8845 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8846 {
8847 /* We cannot increment this SUBREG in place. If we are
8848 post-incrementing, get a copy of the old value. Otherwise,
8849 just mark that we cannot increment in place. */
8850 if (post)
8851 op0 = copy_to_reg (op0);
8852 else
8853 bad_subreg = 1;
8854 }
8855
8856 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8857 && temp != get_last_insn ());
8858 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8859
8860 /* Decide whether incrementing or decrementing. */
8861 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8862 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8863 this_optab = sub_optab;
8864
8865 /* Convert decrement by a constant into a negative increment. */
8866 if (this_optab == sub_optab
8867 && GET_CODE (op1) == CONST_INT)
8868 {
8869 op1 = GEN_INT (-INTVAL (op1));
8870 this_optab = add_optab;
8871 }
8872
8873 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
8874 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
8875
8876 /* For a preincrement, see if we can do this with a single instruction. */
8877 if (!post)
8878 {
8879 icode = (int) this_optab->handlers[(int) mode].insn_code;
8880 if (icode != (int) CODE_FOR_nothing
8881 /* Make sure that OP0 is valid for operands 0 and 1
8882 of the insn we want to queue. */
8883 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8884 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8885 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8886 single_insn = 1;
8887 }
8888
8889 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8890 then we cannot just increment OP0. We must therefore contrive to
8891 increment the original value. Then, for postincrement, we can return
8892 OP0 since it is a copy of the old value. For preincrement, expand here
8893 unless we can do it with a single insn.
8894
8895 Likewise if storing directly into OP0 would clobber high bits
8896 we need to preserve (bad_subreg). */
8897 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8898 {
8899 /* This is the easiest way to increment the value wherever it is.
8900 Problems with multiple evaluation of INCREMENTED are prevented
8901 because either (1) it is a component_ref or preincrement,
8902 in which case it was stabilized above, or (2) it is an array_ref
8903 with constant index in an array in a register, which is
8904 safe to reevaluate. */
8905 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8906 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8907 ? MINUS_EXPR : PLUS_EXPR),
8908 TREE_TYPE (exp),
8909 incremented,
8910 TREE_OPERAND (exp, 1));
8911
8912 while (TREE_CODE (incremented) == NOP_EXPR
8913 || TREE_CODE (incremented) == CONVERT_EXPR)
8914 {
8915 newexp = convert (TREE_TYPE (incremented), newexp);
8916 incremented = TREE_OPERAND (incremented, 0);
8917 }
8918
8919 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8920 return post ? op0 : temp;
8921 }
8922
8923 if (post)
8924 {
8925 /* We have a true reference to the value in OP0.
8926 If there is an insn to add or subtract in this mode, queue it.
8927 Queueing the increment insn avoids the register shuffling
8928 that often results if we must increment now and first save
8929 the old value for subsequent use. */
8930
8931 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8932 op0 = stabilize (op0);
8933 #endif
8934
8935 icode = (int) this_optab->handlers[(int) mode].insn_code;
8936 if (icode != (int) CODE_FOR_nothing
8937 /* Make sure that OP0 is valid for operands 0 and 1
8938 of the insn we want to queue. */
8939 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8940 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8941 {
8942 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8943 op1 = force_reg (mode, op1);
8944
8945 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8946 }
8947 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8948 {
8949 rtx addr = (general_operand (XEXP (op0, 0), mode)
8950 ? force_reg (Pmode, XEXP (op0, 0))
8951 : copy_to_reg (XEXP (op0, 0)));
8952 rtx temp, result;
8953
8954 op0 = replace_equiv_address (op0, addr);
8955 temp = force_reg (GET_MODE (op0), op0);
8956 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8957 op1 = force_reg (mode, op1);
8958
8959 /* The increment queue is LIFO, thus we have to `queue'
8960 the instructions in reverse order. */
8961 enqueue_insn (op0, gen_move_insn (op0, temp));
8962 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8963 return result;
8964 }
8965 }
8966
8967 /* Preincrement, or we can't increment with one simple insn. */
8968 if (post)
8969 /* Save a copy of the value before inc or dec, to return it later. */
8970 temp = value = copy_to_reg (op0);
8971 else
8972 /* Arrange to return the incremented value. */
8973 /* Copy the rtx because expand_binop will protect from the queue,
8974 and the results of that would be invalid for us to return
8975 if our caller does emit_queue before using our result. */
8976 temp = copy_rtx (value = op0);
8977
8978 /* Increment however we can. */
8979 op1 = expand_binop (mode, this_optab, value, op1, op0,
8980 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8981
8982 /* Make sure the value is stored into OP0. */
8983 if (op1 != op0)
8984 emit_move_insn (op0, op1);
8985
8986 return temp;
8987 }
8988 \f
8989 /* At the start of a function, record that we have no previously-pushed
8990 arguments waiting to be popped. */
8991
8992 void
8993 init_pending_stack_adjust ()
8994 {
8995 pending_stack_adjust = 0;
8996 }
8997
8998 /* When exiting from function, if safe, clear out any pending stack adjust
8999 so the adjustment won't get done.
9000
9001 Note, if the current function calls alloca, then it must have a
9002 frame pointer regardless of the value of flag_omit_frame_pointer. */
9003
9004 void
9005 clear_pending_stack_adjust ()
9006 {
9007 #ifdef EXIT_IGNORE_STACK
9008 if (optimize > 0
9009 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9010 && EXIT_IGNORE_STACK
9011 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9012 && ! flag_inline_functions)
9013 {
9014 stack_pointer_delta -= pending_stack_adjust,
9015 pending_stack_adjust = 0;
9016 }
9017 #endif
9018 }
9019
9020 /* Pop any previously-pushed arguments that have not been popped yet. */
9021
9022 void
9023 do_pending_stack_adjust ()
9024 {
9025 if (inhibit_defer_pop == 0)
9026 {
9027 if (pending_stack_adjust != 0)
9028 adjust_stack (GEN_INT (pending_stack_adjust));
9029 pending_stack_adjust = 0;
9030 }
9031 }
9032 \f
9033 /* Expand conditional expressions. */
9034
9035 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9036 LABEL is an rtx of code CODE_LABEL, in this function and all the
9037 functions here. */
9038
9039 void
9040 jumpifnot (exp, label)
9041 tree exp;
9042 rtx label;
9043 {
9044 do_jump (exp, label, NULL_RTX);
9045 }
9046
9047 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9048
9049 void
9050 jumpif (exp, label)
9051 tree exp;
9052 rtx label;
9053 {
9054 do_jump (exp, NULL_RTX, label);
9055 }
9056
9057 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9058 the result is zero, or IF_TRUE_LABEL if the result is one.
9059 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9060 meaning fall through in that case.
9061
9062 do_jump always does any pending stack adjust except when it does not
9063 actually perform a jump. An example where there is no jump
9064 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9065
9066 This function is responsible for optimizing cases such as
9067 &&, || and comparison operators in EXP. */
9068
9069 void
9070 do_jump (exp, if_false_label, if_true_label)
9071 tree exp;
9072 rtx if_false_label, if_true_label;
9073 {
9074 enum tree_code code = TREE_CODE (exp);
9075 /* Some cases need to create a label to jump to
9076 in order to properly fall through.
9077 These cases set DROP_THROUGH_LABEL nonzero. */
9078 rtx drop_through_label = 0;
9079 rtx temp;
9080 int i;
9081 tree type;
9082 enum machine_mode mode;
9083
9084 #ifdef MAX_INTEGER_COMPUTATION_MODE
9085 check_max_integer_computation_mode (exp);
9086 #endif
9087
9088 emit_queue ();
9089
9090 switch (code)
9091 {
9092 case ERROR_MARK:
9093 break;
9094
9095 case INTEGER_CST:
9096 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9097 if (temp)
9098 emit_jump (temp);
9099 break;
9100
9101 #if 0
9102 /* This is not true with #pragma weak */
9103 case ADDR_EXPR:
9104 /* The address of something can never be zero. */
9105 if (if_true_label)
9106 emit_jump (if_true_label);
9107 break;
9108 #endif
9109
9110 case NOP_EXPR:
9111 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9112 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9113 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9114 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9115 goto normal;
9116 case CONVERT_EXPR:
9117 /* If we are narrowing the operand, we have to do the compare in the
9118 narrower mode. */
9119 if ((TYPE_PRECISION (TREE_TYPE (exp))
9120 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9121 goto normal;
9122 case NON_LVALUE_EXPR:
9123 case REFERENCE_EXPR:
9124 case ABS_EXPR:
9125 case NEGATE_EXPR:
9126 case LROTATE_EXPR:
9127 case RROTATE_EXPR:
9128 /* These cannot change zero->non-zero or vice versa. */
9129 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9130 break;
9131
9132 case WITH_RECORD_EXPR:
9133 /* Put the object on the placeholder list, recurse through our first
9134 operand, and pop the list. */
9135 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9136 placeholder_list);
9137 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9138 placeholder_list = TREE_CHAIN (placeholder_list);
9139 break;
9140
9141 #if 0
9142 /* This is never less insns than evaluating the PLUS_EXPR followed by
9143 a test and can be longer if the test is eliminated. */
9144 case PLUS_EXPR:
9145 /* Reduce to minus. */
9146 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9147 TREE_OPERAND (exp, 0),
9148 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9149 TREE_OPERAND (exp, 1))));
9150 /* Process as MINUS. */
9151 #endif
9152
9153 case MINUS_EXPR:
9154 /* Non-zero iff operands of minus differ. */
9155 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9156 TREE_OPERAND (exp, 0),
9157 TREE_OPERAND (exp, 1)),
9158 NE, NE, if_false_label, if_true_label);
9159 break;
9160
9161 case BIT_AND_EXPR:
9162 /* If we are AND'ing with a small constant, do this comparison in the
9163 smallest type that fits. If the machine doesn't have comparisons
9164 that small, it will be converted back to the wider comparison.
9165 This helps if we are testing the sign bit of a narrower object.
9166 combine can't do this for us because it can't know whether a
9167 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9168
9169 if (! SLOW_BYTE_ACCESS
9170 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9171 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9172 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9173 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9174 && (type = type_for_mode (mode, 1)) != 0
9175 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9176 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9177 != CODE_FOR_nothing))
9178 {
9179 do_jump (convert (type, exp), if_false_label, if_true_label);
9180 break;
9181 }
9182 goto normal;
9183
9184 case TRUTH_NOT_EXPR:
9185 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9186 break;
9187
9188 case TRUTH_ANDIF_EXPR:
9189 if (if_false_label == 0)
9190 if_false_label = drop_through_label = gen_label_rtx ();
9191 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9192 start_cleanup_deferral ();
9193 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9194 end_cleanup_deferral ();
9195 break;
9196
9197 case TRUTH_ORIF_EXPR:
9198 if (if_true_label == 0)
9199 if_true_label = drop_through_label = gen_label_rtx ();
9200 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9201 start_cleanup_deferral ();
9202 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9203 end_cleanup_deferral ();
9204 break;
9205
9206 case COMPOUND_EXPR:
9207 push_temp_slots ();
9208 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9209 preserve_temp_slots (NULL_RTX);
9210 free_temp_slots ();
9211 pop_temp_slots ();
9212 emit_queue ();
9213 do_pending_stack_adjust ();
9214 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9215 break;
9216
9217 case COMPONENT_REF:
9218 case BIT_FIELD_REF:
9219 case ARRAY_REF:
9220 case ARRAY_RANGE_REF:
9221 {
9222 HOST_WIDE_INT bitsize, bitpos;
9223 int unsignedp;
9224 enum machine_mode mode;
9225 tree type;
9226 tree offset;
9227 int volatilep = 0;
9228
9229 /* Get description of this reference. We don't actually care
9230 about the underlying object here. */
9231 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9232 &unsignedp, &volatilep);
9233
9234 type = type_for_size (bitsize, unsignedp);
9235 if (! SLOW_BYTE_ACCESS
9236 && type != 0 && bitsize >= 0
9237 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9238 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9239 != CODE_FOR_nothing))
9240 {
9241 do_jump (convert (type, exp), if_false_label, if_true_label);
9242 break;
9243 }
9244 goto normal;
9245 }
9246
9247 case COND_EXPR:
9248 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9249 if (integer_onep (TREE_OPERAND (exp, 1))
9250 && integer_zerop (TREE_OPERAND (exp, 2)))
9251 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9252
9253 else if (integer_zerop (TREE_OPERAND (exp, 1))
9254 && integer_onep (TREE_OPERAND (exp, 2)))
9255 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9256
9257 else
9258 {
9259 rtx label1 = gen_label_rtx ();
9260 drop_through_label = gen_label_rtx ();
9261
9262 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9263
9264 start_cleanup_deferral ();
9265 /* Now the THEN-expression. */
9266 do_jump (TREE_OPERAND (exp, 1),
9267 if_false_label ? if_false_label : drop_through_label,
9268 if_true_label ? if_true_label : drop_through_label);
9269 /* In case the do_jump just above never jumps. */
9270 do_pending_stack_adjust ();
9271 emit_label (label1);
9272
9273 /* Now the ELSE-expression. */
9274 do_jump (TREE_OPERAND (exp, 2),
9275 if_false_label ? if_false_label : drop_through_label,
9276 if_true_label ? if_true_label : drop_through_label);
9277 end_cleanup_deferral ();
9278 }
9279 break;
9280
9281 case EQ_EXPR:
9282 {
9283 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9284
9285 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9286 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9287 {
9288 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9289 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9290 do_jump
9291 (fold
9292 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9293 fold (build (EQ_EXPR, TREE_TYPE (exp),
9294 fold (build1 (REALPART_EXPR,
9295 TREE_TYPE (inner_type),
9296 exp0)),
9297 fold (build1 (REALPART_EXPR,
9298 TREE_TYPE (inner_type),
9299 exp1)))),
9300 fold (build (EQ_EXPR, TREE_TYPE (exp),
9301 fold (build1 (IMAGPART_EXPR,
9302 TREE_TYPE (inner_type),
9303 exp0)),
9304 fold (build1 (IMAGPART_EXPR,
9305 TREE_TYPE (inner_type),
9306 exp1)))))),
9307 if_false_label, if_true_label);
9308 }
9309
9310 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9311 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9312
9313 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9314 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9315 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9316 else
9317 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9318 break;
9319 }
9320
9321 case NE_EXPR:
9322 {
9323 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9324
9325 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9326 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9327 {
9328 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9329 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9330 do_jump
9331 (fold
9332 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9333 fold (build (NE_EXPR, TREE_TYPE (exp),
9334 fold (build1 (REALPART_EXPR,
9335 TREE_TYPE (inner_type),
9336 exp0)),
9337 fold (build1 (REALPART_EXPR,
9338 TREE_TYPE (inner_type),
9339 exp1)))),
9340 fold (build (NE_EXPR, TREE_TYPE (exp),
9341 fold (build1 (IMAGPART_EXPR,
9342 TREE_TYPE (inner_type),
9343 exp0)),
9344 fold (build1 (IMAGPART_EXPR,
9345 TREE_TYPE (inner_type),
9346 exp1)))))),
9347 if_false_label, if_true_label);
9348 }
9349
9350 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9351 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9352
9353 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9354 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9355 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9356 else
9357 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9358 break;
9359 }
9360
9361 case LT_EXPR:
9362 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9363 if (GET_MODE_CLASS (mode) == MODE_INT
9364 && ! can_compare_p (LT, mode, ccp_jump))
9365 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9366 else
9367 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9368 break;
9369
9370 case LE_EXPR:
9371 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9372 if (GET_MODE_CLASS (mode) == MODE_INT
9373 && ! can_compare_p (LE, mode, ccp_jump))
9374 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9375 else
9376 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9377 break;
9378
9379 case GT_EXPR:
9380 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9381 if (GET_MODE_CLASS (mode) == MODE_INT
9382 && ! can_compare_p (GT, mode, ccp_jump))
9383 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9384 else
9385 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9386 break;
9387
9388 case GE_EXPR:
9389 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9390 if (GET_MODE_CLASS (mode) == MODE_INT
9391 && ! can_compare_p (GE, mode, ccp_jump))
9392 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9393 else
9394 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9395 break;
9396
9397 case UNORDERED_EXPR:
9398 case ORDERED_EXPR:
9399 {
9400 enum rtx_code cmp, rcmp;
9401 int do_rev;
9402
9403 if (code == UNORDERED_EXPR)
9404 cmp = UNORDERED, rcmp = ORDERED;
9405 else
9406 cmp = ORDERED, rcmp = UNORDERED;
9407 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9408
9409 do_rev = 0;
9410 if (! can_compare_p (cmp, mode, ccp_jump)
9411 && (can_compare_p (rcmp, mode, ccp_jump)
9412 /* If the target doesn't provide either UNORDERED or ORDERED
9413 comparisons, canonicalize on UNORDERED for the library. */
9414 || rcmp == UNORDERED))
9415 do_rev = 1;
9416
9417 if (! do_rev)
9418 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9419 else
9420 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9421 }
9422 break;
9423
9424 {
9425 enum rtx_code rcode1;
9426 enum tree_code tcode2;
9427
9428 case UNLT_EXPR:
9429 rcode1 = UNLT;
9430 tcode2 = LT_EXPR;
9431 goto unordered_bcc;
9432 case UNLE_EXPR:
9433 rcode1 = UNLE;
9434 tcode2 = LE_EXPR;
9435 goto unordered_bcc;
9436 case UNGT_EXPR:
9437 rcode1 = UNGT;
9438 tcode2 = GT_EXPR;
9439 goto unordered_bcc;
9440 case UNGE_EXPR:
9441 rcode1 = UNGE;
9442 tcode2 = GE_EXPR;
9443 goto unordered_bcc;
9444 case UNEQ_EXPR:
9445 rcode1 = UNEQ;
9446 tcode2 = EQ_EXPR;
9447 goto unordered_bcc;
9448
9449 unordered_bcc:
9450 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9451 if (can_compare_p (rcode1, mode, ccp_jump))
9452 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9453 if_true_label);
9454 else
9455 {
9456 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9457 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9458 tree cmp0, cmp1;
9459
9460 /* If the target doesn't support combined unordered
9461 compares, decompose into UNORDERED + comparison. */
9462 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9463 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9464 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9465 do_jump (exp, if_false_label, if_true_label);
9466 }
9467 }
9468 break;
9469
9470 /* Special case:
9471 __builtin_expect (<test>, 0) and
9472 __builtin_expect (<test>, 1)
9473
9474 We need to do this here, so that <test> is not converted to a SCC
9475 operation on machines that use condition code registers and COMPARE
9476 like the PowerPC, and then the jump is done based on whether the SCC
9477 operation produced a 1 or 0. */
9478 case CALL_EXPR:
9479 /* Check for a built-in function. */
9480 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9481 {
9482 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9483 tree arglist = TREE_OPERAND (exp, 1);
9484
9485 if (TREE_CODE (fndecl) == FUNCTION_DECL
9486 && DECL_BUILT_IN (fndecl)
9487 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9488 && arglist != NULL_TREE
9489 && TREE_CHAIN (arglist) != NULL_TREE)
9490 {
9491 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9492 if_true_label);
9493
9494 if (seq != NULL_RTX)
9495 {
9496 emit_insn (seq);
9497 return;
9498 }
9499 }
9500 }
9501 /* fall through and generate the normal code. */
9502
9503 default:
9504 normal:
9505 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9506 #if 0
9507 /* This is not needed any more and causes poor code since it causes
9508 comparisons and tests from non-SI objects to have different code
9509 sequences. */
9510 /* Copy to register to avoid generating bad insns by cse
9511 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9512 if (!cse_not_expected && GET_CODE (temp) == MEM)
9513 temp = copy_to_reg (temp);
9514 #endif
9515 do_pending_stack_adjust ();
9516 /* Do any postincrements in the expression that was tested. */
9517 emit_queue ();
9518
9519 if (GET_CODE (temp) == CONST_INT
9520 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9521 || GET_CODE (temp) == LABEL_REF)
9522 {
9523 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9524 if (target)
9525 emit_jump (target);
9526 }
9527 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9528 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9529 /* Note swapping the labels gives us not-equal. */
9530 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9531 else if (GET_MODE (temp) != VOIDmode)
9532 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9533 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9534 GET_MODE (temp), NULL_RTX,
9535 if_false_label, if_true_label);
9536 else
9537 abort ();
9538 }
9539
9540 if (drop_through_label)
9541 {
9542 /* If do_jump produces code that might be jumped around,
9543 do any stack adjusts from that code, before the place
9544 where control merges in. */
9545 do_pending_stack_adjust ();
9546 emit_label (drop_through_label);
9547 }
9548 }
9549 \f
9550 /* Given a comparison expression EXP for values too wide to be compared
9551 with one insn, test the comparison and jump to the appropriate label.
9552 The code of EXP is ignored; we always test GT if SWAP is 0,
9553 and LT if SWAP is 1. */
9554
9555 static void
9556 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9557 tree exp;
9558 int swap;
9559 rtx if_false_label, if_true_label;
9560 {
9561 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9562 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9563 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9564 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9565
9566 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9567 }
9568
9569 /* Compare OP0 with OP1, word at a time, in mode MODE.
9570 UNSIGNEDP says to do unsigned comparison.
9571 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9572
9573 void
9574 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9575 enum machine_mode mode;
9576 int unsignedp;
9577 rtx op0, op1;
9578 rtx if_false_label, if_true_label;
9579 {
9580 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9581 rtx drop_through_label = 0;
9582 int i;
9583
9584 if (! if_true_label || ! if_false_label)
9585 drop_through_label = gen_label_rtx ();
9586 if (! if_true_label)
9587 if_true_label = drop_through_label;
9588 if (! if_false_label)
9589 if_false_label = drop_through_label;
9590
9591 /* Compare a word at a time, high order first. */
9592 for (i = 0; i < nwords; i++)
9593 {
9594 rtx op0_word, op1_word;
9595
9596 if (WORDS_BIG_ENDIAN)
9597 {
9598 op0_word = operand_subword_force (op0, i, mode);
9599 op1_word = operand_subword_force (op1, i, mode);
9600 }
9601 else
9602 {
9603 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9604 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9605 }
9606
9607 /* All but high-order word must be compared as unsigned. */
9608 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9609 (unsignedp || i > 0), word_mode, NULL_RTX,
9610 NULL_RTX, if_true_label);
9611
9612 /* Consider lower words only if these are equal. */
9613 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9614 NULL_RTX, NULL_RTX, if_false_label);
9615 }
9616
9617 if (if_false_label)
9618 emit_jump (if_false_label);
9619 if (drop_through_label)
9620 emit_label (drop_through_label);
9621 }
9622
9623 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9624 with one insn, test the comparison and jump to the appropriate label. */
9625
9626 static void
9627 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9628 tree exp;
9629 rtx if_false_label, if_true_label;
9630 {
9631 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9632 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9633 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9634 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9635 int i;
9636 rtx drop_through_label = 0;
9637
9638 if (! if_false_label)
9639 drop_through_label = if_false_label = gen_label_rtx ();
9640
9641 for (i = 0; i < nwords; i++)
9642 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9643 operand_subword_force (op1, i, mode),
9644 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9645 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9646
9647 if (if_true_label)
9648 emit_jump (if_true_label);
9649 if (drop_through_label)
9650 emit_label (drop_through_label);
9651 }
9652 \f
9653 /* Jump according to whether OP0 is 0.
9654 We assume that OP0 has an integer mode that is too wide
9655 for the available compare insns. */
9656
9657 void
9658 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9659 rtx op0;
9660 rtx if_false_label, if_true_label;
9661 {
9662 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9663 rtx part;
9664 int i;
9665 rtx drop_through_label = 0;
9666
9667 /* The fastest way of doing this comparison on almost any machine is to
9668 "or" all the words and compare the result. If all have to be loaded
9669 from memory and this is a very wide item, it's possible this may
9670 be slower, but that's highly unlikely. */
9671
9672 part = gen_reg_rtx (word_mode);
9673 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9674 for (i = 1; i < nwords && part != 0; i++)
9675 part = expand_binop (word_mode, ior_optab, part,
9676 operand_subword_force (op0, i, GET_MODE (op0)),
9677 part, 1, OPTAB_WIDEN);
9678
9679 if (part != 0)
9680 {
9681 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9682 NULL_RTX, if_false_label, if_true_label);
9683
9684 return;
9685 }
9686
9687 /* If we couldn't do the "or" simply, do this with a series of compares. */
9688 if (! if_false_label)
9689 drop_through_label = if_false_label = gen_label_rtx ();
9690
9691 for (i = 0; i < nwords; i++)
9692 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9693 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9694 if_false_label, NULL_RTX);
9695
9696 if (if_true_label)
9697 emit_jump (if_true_label);
9698
9699 if (drop_through_label)
9700 emit_label (drop_through_label);
9701 }
9702 \f
9703 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9704 (including code to compute the values to be compared)
9705 and set (CC0) according to the result.
9706 The decision as to signed or unsigned comparison must be made by the caller.
9707
9708 We force a stack adjustment unless there are currently
9709 things pushed on the stack that aren't yet used.
9710
9711 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9712 compared. */
9713
9714 rtx
9715 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9716 rtx op0, op1;
9717 enum rtx_code code;
9718 int unsignedp;
9719 enum machine_mode mode;
9720 rtx size;
9721 {
9722 rtx tem;
9723
9724 /* If one operand is constant, make it the second one. Only do this
9725 if the other operand is not constant as well. */
9726
9727 if (swap_commutative_operands_p (op0, op1))
9728 {
9729 tem = op0;
9730 op0 = op1;
9731 op1 = tem;
9732 code = swap_condition (code);
9733 }
9734
9735 if (flag_force_mem)
9736 {
9737 op0 = force_not_mem (op0);
9738 op1 = force_not_mem (op1);
9739 }
9740
9741 do_pending_stack_adjust ();
9742
9743 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9744 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9745 return tem;
9746
9747 #if 0
9748 /* There's no need to do this now that combine.c can eliminate lots of
9749 sign extensions. This can be less efficient in certain cases on other
9750 machines. */
9751
9752 /* If this is a signed equality comparison, we can do it as an
9753 unsigned comparison since zero-extension is cheaper than sign
9754 extension and comparisons with zero are done as unsigned. This is
9755 the case even on machines that can do fast sign extension, since
9756 zero-extension is easier to combine with other operations than
9757 sign-extension is. If we are comparing against a constant, we must
9758 convert it to what it would look like unsigned. */
9759 if ((code == EQ || code == NE) && ! unsignedp
9760 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9761 {
9762 if (GET_CODE (op1) == CONST_INT
9763 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9764 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9765 unsignedp = 1;
9766 }
9767 #endif
9768
9769 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9770
9771 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9772 }
9773
9774 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9775 The decision as to signed or unsigned comparison must be made by the caller.
9776
9777 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9778 compared. */
9779
9780 void
9781 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9782 if_false_label, if_true_label)
9783 rtx op0, op1;
9784 enum rtx_code code;
9785 int unsignedp;
9786 enum machine_mode mode;
9787 rtx size;
9788 rtx if_false_label, if_true_label;
9789 {
9790 rtx tem;
9791 int dummy_true_label = 0;
9792
9793 /* Reverse the comparison if that is safe and we want to jump if it is
9794 false. */
9795 if (! if_true_label && ! FLOAT_MODE_P (mode))
9796 {
9797 if_true_label = if_false_label;
9798 if_false_label = 0;
9799 code = reverse_condition (code);
9800 }
9801
9802 /* If one operand is constant, make it the second one. Only do this
9803 if the other operand is not constant as well. */
9804
9805 if (swap_commutative_operands_p (op0, op1))
9806 {
9807 tem = op0;
9808 op0 = op1;
9809 op1 = tem;
9810 code = swap_condition (code);
9811 }
9812
9813 if (flag_force_mem)
9814 {
9815 op0 = force_not_mem (op0);
9816 op1 = force_not_mem (op1);
9817 }
9818
9819 do_pending_stack_adjust ();
9820
9821 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9822 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9823 {
9824 if (tem == const_true_rtx)
9825 {
9826 if (if_true_label)
9827 emit_jump (if_true_label);
9828 }
9829 else
9830 {
9831 if (if_false_label)
9832 emit_jump (if_false_label);
9833 }
9834 return;
9835 }
9836
9837 #if 0
9838 /* There's no need to do this now that combine.c can eliminate lots of
9839 sign extensions. This can be less efficient in certain cases on other
9840 machines. */
9841
9842 /* If this is a signed equality comparison, we can do it as an
9843 unsigned comparison since zero-extension is cheaper than sign
9844 extension and comparisons with zero are done as unsigned. This is
9845 the case even on machines that can do fast sign extension, since
9846 zero-extension is easier to combine with other operations than
9847 sign-extension is. If we are comparing against a constant, we must
9848 convert it to what it would look like unsigned. */
9849 if ((code == EQ || code == NE) && ! unsignedp
9850 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9851 {
9852 if (GET_CODE (op1) == CONST_INT
9853 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9854 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9855 unsignedp = 1;
9856 }
9857 #endif
9858
9859 if (! if_true_label)
9860 {
9861 dummy_true_label = 1;
9862 if_true_label = gen_label_rtx ();
9863 }
9864
9865 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
9866 if_true_label);
9867
9868 if (if_false_label)
9869 emit_jump (if_false_label);
9870 if (dummy_true_label)
9871 emit_label (if_true_label);
9872 }
9873
9874 /* Generate code for a comparison expression EXP (including code to compute
9875 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9876 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9877 generated code will drop through.
9878 SIGNED_CODE should be the rtx operation for this comparison for
9879 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9880
9881 We force a stack adjustment unless there are currently
9882 things pushed on the stack that aren't yet used. */
9883
9884 static void
9885 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9886 if_true_label)
9887 tree exp;
9888 enum rtx_code signed_code, unsigned_code;
9889 rtx if_false_label, if_true_label;
9890 {
9891 rtx op0, op1;
9892 tree type;
9893 enum machine_mode mode;
9894 int unsignedp;
9895 enum rtx_code code;
9896
9897 /* Don't crash if the comparison was erroneous. */
9898 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9899 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9900 return;
9901
9902 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9903 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9904 return;
9905
9906 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9907 mode = TYPE_MODE (type);
9908 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9909 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9910 || (GET_MODE_BITSIZE (mode)
9911 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
9912 1)))))))
9913 {
9914 /* op0 might have been replaced by promoted constant, in which
9915 case the type of second argument should be used. */
9916 type = TREE_TYPE (TREE_OPERAND (exp, 1));
9917 mode = TYPE_MODE (type);
9918 }
9919 unsignedp = TREE_UNSIGNED (type);
9920 code = unsignedp ? unsigned_code : signed_code;
9921
9922 #ifdef HAVE_canonicalize_funcptr_for_compare
9923 /* If function pointers need to be "canonicalized" before they can
9924 be reliably compared, then canonicalize them. */
9925 if (HAVE_canonicalize_funcptr_for_compare
9926 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9927 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9928 == FUNCTION_TYPE))
9929 {
9930 rtx new_op0 = gen_reg_rtx (mode);
9931
9932 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9933 op0 = new_op0;
9934 }
9935
9936 if (HAVE_canonicalize_funcptr_for_compare
9937 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9938 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9939 == FUNCTION_TYPE))
9940 {
9941 rtx new_op1 = gen_reg_rtx (mode);
9942
9943 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9944 op1 = new_op1;
9945 }
9946 #endif
9947
9948 /* Do any postincrements in the expression that was tested. */
9949 emit_queue ();
9950
9951 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9952 ((mode == BLKmode)
9953 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9954 if_false_label, if_true_label);
9955 }
9956 \f
9957 /* Generate code to calculate EXP using a store-flag instruction
9958 and return an rtx for the result. EXP is either a comparison
9959 or a TRUTH_NOT_EXPR whose operand is a comparison.
9960
9961 If TARGET is nonzero, store the result there if convenient.
9962
9963 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9964 cheap.
9965
9966 Return zero if there is no suitable set-flag instruction
9967 available on this machine.
9968
9969 Once expand_expr has been called on the arguments of the comparison,
9970 we are committed to doing the store flag, since it is not safe to
9971 re-evaluate the expression. We emit the store-flag insn by calling
9972 emit_store_flag, but only expand the arguments if we have a reason
9973 to believe that emit_store_flag will be successful. If we think that
9974 it will, but it isn't, we have to simulate the store-flag with a
9975 set/jump/set sequence. */
9976
9977 static rtx
9978 do_store_flag (exp, target, mode, only_cheap)
9979 tree exp;
9980 rtx target;
9981 enum machine_mode mode;
9982 int only_cheap;
9983 {
9984 enum rtx_code code;
9985 tree arg0, arg1, type;
9986 tree tem;
9987 enum machine_mode operand_mode;
9988 int invert = 0;
9989 int unsignedp;
9990 rtx op0, op1;
9991 enum insn_code icode;
9992 rtx subtarget = target;
9993 rtx result, label;
9994
9995 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9996 result at the end. We can't simply invert the test since it would
9997 have already been inverted if it were valid. This case occurs for
9998 some floating-point comparisons. */
9999
10000 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10001 invert = 1, exp = TREE_OPERAND (exp, 0);
10002
10003 arg0 = TREE_OPERAND (exp, 0);
10004 arg1 = TREE_OPERAND (exp, 1);
10005
10006 /* Don't crash if the comparison was erroneous. */
10007 if (arg0 == error_mark_node || arg1 == error_mark_node)
10008 return const0_rtx;
10009
10010 type = TREE_TYPE (arg0);
10011 operand_mode = TYPE_MODE (type);
10012 unsignedp = TREE_UNSIGNED (type);
10013
10014 /* We won't bother with BLKmode store-flag operations because it would mean
10015 passing a lot of information to emit_store_flag. */
10016 if (operand_mode == BLKmode)
10017 return 0;
10018
10019 /* We won't bother with store-flag operations involving function pointers
10020 when function pointers must be canonicalized before comparisons. */
10021 #ifdef HAVE_canonicalize_funcptr_for_compare
10022 if (HAVE_canonicalize_funcptr_for_compare
10023 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10024 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10025 == FUNCTION_TYPE))
10026 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10027 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10028 == FUNCTION_TYPE))))
10029 return 0;
10030 #endif
10031
10032 STRIP_NOPS (arg0);
10033 STRIP_NOPS (arg1);
10034
10035 /* Get the rtx comparison code to use. We know that EXP is a comparison
10036 operation of some type. Some comparisons against 1 and -1 can be
10037 converted to comparisons with zero. Do so here so that the tests
10038 below will be aware that we have a comparison with zero. These
10039 tests will not catch constants in the first operand, but constants
10040 are rarely passed as the first operand. */
10041
10042 switch (TREE_CODE (exp))
10043 {
10044 case EQ_EXPR:
10045 code = EQ;
10046 break;
10047 case NE_EXPR:
10048 code = NE;
10049 break;
10050 case LT_EXPR:
10051 if (integer_onep (arg1))
10052 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10053 else
10054 code = unsignedp ? LTU : LT;
10055 break;
10056 case LE_EXPR:
10057 if (! unsignedp && integer_all_onesp (arg1))
10058 arg1 = integer_zero_node, code = LT;
10059 else
10060 code = unsignedp ? LEU : LE;
10061 break;
10062 case GT_EXPR:
10063 if (! unsignedp && integer_all_onesp (arg1))
10064 arg1 = integer_zero_node, code = GE;
10065 else
10066 code = unsignedp ? GTU : GT;
10067 break;
10068 case GE_EXPR:
10069 if (integer_onep (arg1))
10070 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10071 else
10072 code = unsignedp ? GEU : GE;
10073 break;
10074
10075 case UNORDERED_EXPR:
10076 code = UNORDERED;
10077 break;
10078 case ORDERED_EXPR:
10079 code = ORDERED;
10080 break;
10081 case UNLT_EXPR:
10082 code = UNLT;
10083 break;
10084 case UNLE_EXPR:
10085 code = UNLE;
10086 break;
10087 case UNGT_EXPR:
10088 code = UNGT;
10089 break;
10090 case UNGE_EXPR:
10091 code = UNGE;
10092 break;
10093 case UNEQ_EXPR:
10094 code = UNEQ;
10095 break;
10096
10097 default:
10098 abort ();
10099 }
10100
10101 /* Put a constant second. */
10102 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10103 {
10104 tem = arg0; arg0 = arg1; arg1 = tem;
10105 code = swap_condition (code);
10106 }
10107
10108 /* If this is an equality or inequality test of a single bit, we can
10109 do this by shifting the bit being tested to the low-order bit and
10110 masking the result with the constant 1. If the condition was EQ,
10111 we xor it with 1. This does not require an scc insn and is faster
10112 than an scc insn even if we have it. */
10113
10114 if ((code == NE || code == EQ)
10115 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10116 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10117 {
10118 tree inner = TREE_OPERAND (arg0, 0);
10119 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10120 int ops_unsignedp;
10121
10122 /* If INNER is a right shift of a constant and it plus BITNUM does
10123 not overflow, adjust BITNUM and INNER. */
10124
10125 if (TREE_CODE (inner) == RSHIFT_EXPR
10126 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10127 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10128 && bitnum < TYPE_PRECISION (type)
10129 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10130 bitnum - TYPE_PRECISION (type)))
10131 {
10132 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10133 inner = TREE_OPERAND (inner, 0);
10134 }
10135
10136 /* If we are going to be able to omit the AND below, we must do our
10137 operations as unsigned. If we must use the AND, we have a choice.
10138 Normally unsigned is faster, but for some machines signed is. */
10139 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10140 #ifdef LOAD_EXTEND_OP
10141 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10142 #else
10143 : 1
10144 #endif
10145 );
10146
10147 if (! get_subtarget (subtarget)
10148 || GET_MODE (subtarget) != operand_mode
10149 || ! safe_from_p (subtarget, inner, 1))
10150 subtarget = 0;
10151
10152 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10153
10154 if (bitnum != 0)
10155 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10156 size_int (bitnum), subtarget, ops_unsignedp);
10157
10158 if (GET_MODE (op0) != mode)
10159 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10160
10161 if ((code == EQ && ! invert) || (code == NE && invert))
10162 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10163 ops_unsignedp, OPTAB_LIB_WIDEN);
10164
10165 /* Put the AND last so it can combine with more things. */
10166 if (bitnum != TYPE_PRECISION (type) - 1)
10167 op0 = expand_and (op0, const1_rtx, subtarget);
10168
10169 return op0;
10170 }
10171
10172 /* Now see if we are likely to be able to do this. Return if not. */
10173 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10174 return 0;
10175
10176 icode = setcc_gen_code[(int) code];
10177 if (icode == CODE_FOR_nothing
10178 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10179 {
10180 /* We can only do this if it is one of the special cases that
10181 can be handled without an scc insn. */
10182 if ((code == LT && integer_zerop (arg1))
10183 || (! only_cheap && code == GE && integer_zerop (arg1)))
10184 ;
10185 else if (BRANCH_COST >= 0
10186 && ! only_cheap && (code == NE || code == EQ)
10187 && TREE_CODE (type) != REAL_TYPE
10188 && ((abs_optab->handlers[(int) operand_mode].insn_code
10189 != CODE_FOR_nothing)
10190 || (ffs_optab->handlers[(int) operand_mode].insn_code
10191 != CODE_FOR_nothing)))
10192 ;
10193 else
10194 return 0;
10195 }
10196
10197 if (! get_subtarget (target)
10198 || GET_MODE (subtarget) != operand_mode
10199 || ! safe_from_p (subtarget, arg1, 1))
10200 subtarget = 0;
10201
10202 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10203 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10204
10205 if (target == 0)
10206 target = gen_reg_rtx (mode);
10207
10208 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10209 because, if the emit_store_flag does anything it will succeed and
10210 OP0 and OP1 will not be used subsequently. */
10211
10212 result = emit_store_flag (target, code,
10213 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10214 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10215 operand_mode, unsignedp, 1);
10216
10217 if (result)
10218 {
10219 if (invert)
10220 result = expand_binop (mode, xor_optab, result, const1_rtx,
10221 result, 0, OPTAB_LIB_WIDEN);
10222 return result;
10223 }
10224
10225 /* If this failed, we have to do this with set/compare/jump/set code. */
10226 if (GET_CODE (target) != REG
10227 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10228 target = gen_reg_rtx (GET_MODE (target));
10229
10230 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10231 result = compare_from_rtx (op0, op1, code, unsignedp,
10232 operand_mode, NULL_RTX);
10233 if (GET_CODE (result) == CONST_INT)
10234 return (((result == const0_rtx && ! invert)
10235 || (result != const0_rtx && invert))
10236 ? const0_rtx : const1_rtx);
10237
10238 /* The code of RESULT may not match CODE if compare_from_rtx
10239 decided to swap its operands and reverse the original code.
10240
10241 We know that compare_from_rtx returns either a CONST_INT or
10242 a new comparison code, so it is safe to just extract the
10243 code from RESULT. */
10244 code = GET_CODE (result);
10245
10246 label = gen_label_rtx ();
10247 if (bcc_gen_fctn[(int) code] == 0)
10248 abort ();
10249
10250 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10251 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10252 emit_label (label);
10253
10254 return target;
10255 }
10256 \f
10257
10258 /* Stubs in case we haven't got a casesi insn. */
10259 #ifndef HAVE_casesi
10260 # define HAVE_casesi 0
10261 # define gen_casesi(a, b, c, d, e) (0)
10262 # define CODE_FOR_casesi CODE_FOR_nothing
10263 #endif
10264
10265 /* If the machine does not have a case insn that compares the bounds,
10266 this means extra overhead for dispatch tables, which raises the
10267 threshold for using them. */
10268 #ifndef CASE_VALUES_THRESHOLD
10269 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10270 #endif /* CASE_VALUES_THRESHOLD */
10271
10272 unsigned int
10273 case_values_threshold ()
10274 {
10275 return CASE_VALUES_THRESHOLD;
10276 }
10277
10278 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10279 0 otherwise (i.e. if there is no casesi instruction). */
10280 int
10281 try_casesi (index_type, index_expr, minval, range,
10282 table_label, default_label)
10283 tree index_type, index_expr, minval, range;
10284 rtx table_label ATTRIBUTE_UNUSED;
10285 rtx default_label;
10286 {
10287 enum machine_mode index_mode = SImode;
10288 int index_bits = GET_MODE_BITSIZE (index_mode);
10289 rtx op1, op2, index;
10290 enum machine_mode op_mode;
10291
10292 if (! HAVE_casesi)
10293 return 0;
10294
10295 /* Convert the index to SImode. */
10296 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10297 {
10298 enum machine_mode omode = TYPE_MODE (index_type);
10299 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10300
10301 /* We must handle the endpoints in the original mode. */
10302 index_expr = build (MINUS_EXPR, index_type,
10303 index_expr, minval);
10304 minval = integer_zero_node;
10305 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10306 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10307 omode, 1, default_label);
10308 /* Now we can safely truncate. */
10309 index = convert_to_mode (index_mode, index, 0);
10310 }
10311 else
10312 {
10313 if (TYPE_MODE (index_type) != index_mode)
10314 {
10315 index_expr = convert (type_for_size (index_bits, 0),
10316 index_expr);
10317 index_type = TREE_TYPE (index_expr);
10318 }
10319
10320 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10321 }
10322 emit_queue ();
10323 index = protect_from_queue (index, 0);
10324 do_pending_stack_adjust ();
10325
10326 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10327 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10328 (index, op_mode))
10329 index = copy_to_mode_reg (op_mode, index);
10330
10331 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10332
10333 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10334 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10335 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10336 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10337 (op1, op_mode))
10338 op1 = copy_to_mode_reg (op_mode, op1);
10339
10340 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10341
10342 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10343 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10344 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10345 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10346 (op2, op_mode))
10347 op2 = copy_to_mode_reg (op_mode, op2);
10348
10349 emit_jump_insn (gen_casesi (index, op1, op2,
10350 table_label, default_label));
10351 return 1;
10352 }
10353
10354 /* Attempt to generate a tablejump instruction; same concept. */
10355 #ifndef HAVE_tablejump
10356 #define HAVE_tablejump 0
10357 #define gen_tablejump(x, y) (0)
10358 #endif
10359
10360 /* Subroutine of the next function.
10361
10362 INDEX is the value being switched on, with the lowest value
10363 in the table already subtracted.
10364 MODE is its expected mode (needed if INDEX is constant).
10365 RANGE is the length of the jump table.
10366 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10367
10368 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10369 index value is out of range. */
10370
10371 static void
10372 do_tablejump (index, mode, range, table_label, default_label)
10373 rtx index, range, table_label, default_label;
10374 enum machine_mode mode;
10375 {
10376 rtx temp, vector;
10377
10378 /* Do an unsigned comparison (in the proper mode) between the index
10379 expression and the value which represents the length of the range.
10380 Since we just finished subtracting the lower bound of the range
10381 from the index expression, this comparison allows us to simultaneously
10382 check that the original index expression value is both greater than
10383 or equal to the minimum value of the range and less than or equal to
10384 the maximum value of the range. */
10385
10386 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10387 default_label);
10388
10389 /* If index is in range, it must fit in Pmode.
10390 Convert to Pmode so we can index with it. */
10391 if (mode != Pmode)
10392 index = convert_to_mode (Pmode, index, 1);
10393
10394 /* Don't let a MEM slip thru, because then INDEX that comes
10395 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10396 and break_out_memory_refs will go to work on it and mess it up. */
10397 #ifdef PIC_CASE_VECTOR_ADDRESS
10398 if (flag_pic && GET_CODE (index) != REG)
10399 index = copy_to_mode_reg (Pmode, index);
10400 #endif
10401
10402 /* If flag_force_addr were to affect this address
10403 it could interfere with the tricky assumptions made
10404 about addresses that contain label-refs,
10405 which may be valid only very near the tablejump itself. */
10406 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10407 GET_MODE_SIZE, because this indicates how large insns are. The other
10408 uses should all be Pmode, because they are addresses. This code
10409 could fail if addresses and insns are not the same size. */
10410 index = gen_rtx_PLUS (Pmode,
10411 gen_rtx_MULT (Pmode, index,
10412 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10413 gen_rtx_LABEL_REF (Pmode, table_label));
10414 #ifdef PIC_CASE_VECTOR_ADDRESS
10415 if (flag_pic)
10416 index = PIC_CASE_VECTOR_ADDRESS (index);
10417 else
10418 #endif
10419 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10420 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10421 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10422 RTX_UNCHANGING_P (vector) = 1;
10423 convert_move (temp, vector, 0);
10424
10425 emit_jump_insn (gen_tablejump (temp, table_label));
10426
10427 /* If we are generating PIC code or if the table is PC-relative, the
10428 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10429 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10430 emit_barrier ();
10431 }
10432
10433 int
10434 try_tablejump (index_type, index_expr, minval, range,
10435 table_label, default_label)
10436 tree index_type, index_expr, minval, range;
10437 rtx table_label, default_label;
10438 {
10439 rtx index;
10440
10441 if (! HAVE_tablejump)
10442 return 0;
10443
10444 index_expr = fold (build (MINUS_EXPR, index_type,
10445 convert (index_type, index_expr),
10446 convert (index_type, minval)));
10447 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10448 emit_queue ();
10449 index = protect_from_queue (index, 0);
10450 do_pending_stack_adjust ();
10451
10452 do_tablejump (index, TYPE_MODE (index_type),
10453 convert_modes (TYPE_MODE (index_type),
10454 TYPE_MODE (TREE_TYPE (range)),
10455 expand_expr (range, NULL_RTX,
10456 VOIDmode, 0),
10457 TREE_UNSIGNED (TREE_TYPE (range))),
10458 table_label, default_label);
10459 return 1;
10460 }