expr.c (expand_expr): Call convert_modes when turning a large multiply into a small...
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
47
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
50 #endif
51
52 /* Supply a default definition for PUSH_ARGS. */
53 #ifndef PUSH_ARGS
54 #ifdef PUSH_ROUNDING
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
56 #else
57 #define PUSH_ARGS 0
58 #endif
59 #endif
60
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
63
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
66
67 #ifdef PUSH_ROUNDING
68
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
72
73 #endif
74
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
82
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
86 #endif
87
88 /* If this is nonzero, we do not bother generating VOLATILE
89 around volatile memory references, and we are willing to
90 output indirect addresses. If cse is to follow, we reject
91 indirect addresses so a useful potential cse is generated;
92 if it is used only once, instruction combination will produce
93 the same indirect address eventually. */
94 int cse_not_expected;
95
96 /* Nonzero to generate code for all the subroutines within an
97 expression before generating the upper levels of the expression.
98 Nowadays this is never zero. */
99 int do_preexpand_calls = 1;
100
101 /* Don't check memory usage, since code is being emitted to check memory
102 usage. Used when current_function_check_memory_usage is true, to avoid
103 infinite recursion. */
104 static int in_check_memory_usage;
105
106 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
107 static tree placeholder_list = 0;
108
109 /* This structure is used by move_by_pieces to describe the move to
110 be performed. */
111 struct move_by_pieces
112 {
113 rtx to;
114 rtx to_addr;
115 int autinc_to;
116 int explicit_inc_to;
117 rtx from;
118 rtx from_addr;
119 int autinc_from;
120 int explicit_inc_from;
121 unsigned HOST_WIDE_INT len;
122 HOST_WIDE_INT offset;
123 int reverse;
124 };
125
126 /* This structure is used by clear_by_pieces to describe the clear to
127 be performed. */
128
129 struct clear_by_pieces
130 {
131 rtx to;
132 rtx to_addr;
133 int autinc_to;
134 int explicit_inc_to;
135 unsigned HOST_WIDE_INT len;
136 HOST_WIDE_INT offset;
137 int reverse;
138 };
139
140 extern struct obstack permanent_obstack;
141
142 static rtx get_push_address PARAMS ((int));
143
144 static rtx enqueue_insn PARAMS ((rtx, rtx));
145 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
146 PARAMS ((unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
149 struct move_by_pieces *));
150 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
151 unsigned int));
152 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
153 enum machine_mode,
154 struct clear_by_pieces *));
155 static rtx get_subtarget PARAMS ((rtx));
156 static int is_zeros_p PARAMS ((tree));
157 static int mostly_zeros_p PARAMS ((tree));
158 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, tree, unsigned int, int));
161 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
162 HOST_WIDE_INT));
163 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
164 HOST_WIDE_INT, enum machine_mode,
165 tree, enum machine_mode, int,
166 unsigned int, HOST_WIDE_INT, int));
167 static enum memory_use_mode
168 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
169 static tree save_noncopied_parts PARAMS ((tree, tree));
170 static tree init_noncopied_parts PARAMS ((tree, tree));
171 static int safe_from_p PARAMS ((rtx, tree, int));
172 static int fixed_type_p PARAMS ((tree));
173 static rtx var_rtx PARAMS ((tree));
174 static int readonly_fields_p PARAMS ((tree));
175 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
176 static rtx expand_increment PARAMS ((tree, int, int));
177 static void preexpand_calls PARAMS ((tree));
178 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
179 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
180 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
181 rtx, rtx));
182 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
183
184 /* Record for each mode whether we can move a register directly to or
185 from an object of that mode in memory. If we can't, we won't try
186 to use that mode directly when accessing a field of that mode. */
187
188 static char direct_load[NUM_MACHINE_MODES];
189 static char direct_store[NUM_MACHINE_MODES];
190
191 /* If a memory-to-memory move would take MOVE_RATIO or more simple
192 move-instruction sequences, we will do a movstr or libcall instead. */
193
194 #ifndef MOVE_RATIO
195 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
196 #define MOVE_RATIO 2
197 #else
198 /* If we are optimizing for space (-Os), cut down the default move ratio. */
199 #define MOVE_RATIO (optimize_size ? 3 : 15)
200 #endif
201 #endif
202
203 /* This macro is used to determine whether move_by_pieces should be called
204 to perform a structure copy. */
205 #ifndef MOVE_BY_PIECES_P
206 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
207 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
208 #endif
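/* For instance, on a 32-bit target with word-aligned operands (ALIGN of
   32 bits), a 16-byte copy takes four SImode moves, so
   move_by_pieces_ninsns (16, 32) is 4 and MOVE_BY_PIECES_P (16, 32)
   holds only when MOVE_RATIO is greater than 4.  emit_block_move below
   uses exactly this test to choose between inline moves and a movstrM
   pattern or library call.  (The numbers are illustrative, not a
   requirement on any target.)  */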
209
210 /* This array records the insn_code of insns to perform block moves. */
211 enum insn_code movstr_optab[NUM_MACHINE_MODES];
212
213 /* This array records the insn_code of insns to perform block clears. */
214 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
215
216 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
217
218 #ifndef SLOW_UNALIGNED_ACCESS
219 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
220 #endif
221 \f
222 /* This is run once per compilation to set up which modes can be used
223 directly in memory and to initialize the block move optab. */
224
225 void
226 init_expr_once ()
227 {
228 rtx insn, pat;
229 enum machine_mode mode;
230 int num_clobbers;
231 rtx mem, mem1;
232 char *free_point;
233
234 start_sequence ();
235
236 /* Since we are on the permanent obstack, we must be sure we save this
237 spot AFTER we call start_sequence, since it will reuse the rtl it
238 makes. */
239 free_point = (char *) oballoc (0);
240
241 /* Try indexing by frame ptr and try by stack ptr.
242 It is known that on the Convex the stack ptr isn't a valid index.
243 With luck, one or the other is valid on any machine. */
244 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
245 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
246
247 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
248 pat = PATTERN (insn);
249
250 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
251 mode = (enum machine_mode) ((int) mode + 1))
252 {
253 int regno;
254 rtx reg;
255
256 direct_load[(int) mode] = direct_store[(int) mode] = 0;
257 PUT_MODE (mem, mode);
258 PUT_MODE (mem1, mode);
259
260 /* See if there is some register that can be used in this mode and
261 directly loaded or stored from memory. */
262
263 if (mode != VOIDmode && mode != BLKmode)
264 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
265 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
266 regno++)
267 {
268 if (! HARD_REGNO_MODE_OK (regno, mode))
269 continue;
270
271 reg = gen_rtx_REG (mode, regno);
272
273 SET_SRC (pat) = mem;
274 SET_DEST (pat) = reg;
275 if (recog (pat, insn, &num_clobbers) >= 0)
276 direct_load[(int) mode] = 1;
277
278 SET_SRC (pat) = mem1;
279 SET_DEST (pat) = reg;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_load[(int) mode] = 1;
282
283 SET_SRC (pat) = reg;
284 SET_DEST (pat) = mem;
285 if (recog (pat, insn, &num_clobbers) >= 0)
286 direct_store[(int) mode] = 1;
287
288 SET_SRC (pat) = reg;
289 SET_DEST (pat) = mem1;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_store[(int) mode] = 1;
292 }
293 }
294
295 end_sequence ();
296 obfree (free_point);
297 }
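/* The direct_load and direct_store tables computed above are consulted
   later (for instance by convert_move and convert_modes) to decide
   whether a MEM can be referenced directly in a given mode or must
   first be copied into a register.  */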
298
299 /* This is run at the start of compiling a function. */
300
301 void
302 init_expr ()
303 {
304 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
305
306 pending_chain = 0;
307 pending_stack_adjust = 0;
308 stack_pointer_delta = 0;
309 inhibit_defer_pop = 0;
310 saveregs_value = 0;
311 apply_args_value = 0;
312 forced_labels = 0;
313 }
314
315 void
316 mark_expr_status (p)
317 struct expr_status *p;
318 {
319 if (p == NULL)
320 return;
321
322 ggc_mark_rtx (p->x_saveregs_value);
323 ggc_mark_rtx (p->x_apply_args_value);
324 ggc_mark_rtx (p->x_forced_labels);
325 }
326
327 void
328 free_expr_status (f)
329 struct function *f;
330 {
331 free (f->expr);
332 f->expr = NULL;
333 }
334
335 /* Small sanity check that the queue is empty at the end of a function. */
336
337 void
338 finish_expr_for_function ()
339 {
340 if (pending_chain)
341 abort ();
342 }
343 \f
344 /* Manage the queue of increment instructions to be output
345 for POSTINCREMENT_EXPR expressions, etc. */
346
347 /* Queue up to increment (or change) VAR later. BODY says how:
348 BODY should be the same thing you would pass to emit_insn
349 to increment right away. It will go to emit_insn later on.
350
351 The value is a QUEUED expression to be used in place of VAR
352 where you want to guarantee the pre-incrementation value of VAR. */
353
354 static rtx
355 enqueue_insn (var, body)
356 rtx var, body;
357 {
358 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
359 body, pending_chain);
360 return pending_chain;
361 }
362
363 /* Use protect_from_queue to convert a QUEUED expression
364 into something that you can put immediately into an instruction.
365 If the queued incrementation has not happened yet,
366 protect_from_queue returns the variable itself.
367 If the incrementation has happened, protect_from_queue returns a temp
368 that contains a copy of the old value of the variable.
369
370 Any time an rtx which might possibly be a QUEUED is to be put
371 into an instruction, it must be passed through protect_from_queue first.
372 QUEUED expressions are not meaningful in instructions.
373
374 Do not pass a value through protect_from_queue and then hold
375 on to it for a while before putting it in an instruction!
376 If the queue is flushed in between, incorrect code will result. */
377
378 rtx
379 protect_from_queue (x, modify)
380 register rtx x;
381 int modify;
382 {
383 register RTX_CODE code = GET_CODE (x);
384
385 #if 0 /* A QUEUED can hang around after the queue is forced out. */
386 /* Shortcut for most common case. */
387 if (pending_chain == 0)
388 return x;
389 #endif
390
391 if (code != QUEUED)
392 {
393 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
394 use of autoincrement. Make a copy of the contents of the memory
395 location rather than a copy of the address, but not if the value is
396 of mode BLKmode. Don't modify X in place since it might be
397 shared. */
398 if (code == MEM && GET_MODE (x) != BLKmode
399 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
400 {
401 register rtx y = XEXP (x, 0);
402 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
403
404 MEM_COPY_ATTRIBUTES (new, x);
405
406 if (QUEUED_INSN (y))
407 {
408 register rtx temp = gen_reg_rtx (GET_MODE (new));
409 emit_insn_before (gen_move_insn (temp, new),
410 QUEUED_INSN (y));
411 return temp;
412 }
413 return new;
414 }
415 /* Otherwise, recursively protect the subexpressions of all
416 the kinds of rtx's that can contain a QUEUED. */
417 if (code == MEM)
418 {
419 rtx tem = protect_from_queue (XEXP (x, 0), 0);
420 if (tem != XEXP (x, 0))
421 {
422 x = copy_rtx (x);
423 XEXP (x, 0) = tem;
424 }
425 }
426 else if (code == PLUS || code == MULT)
427 {
428 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
429 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
430 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
431 {
432 x = copy_rtx (x);
433 XEXP (x, 0) = new0;
434 XEXP (x, 1) = new1;
435 }
436 }
437 return x;
438 }
439 /* If the increment has not happened, use the variable itself. */
440 if (QUEUED_INSN (x) == 0)
441 return QUEUED_VAR (x);
442 /* If the increment has happened and a pre-increment copy exists,
443 use that copy. */
444 if (QUEUED_COPY (x) != 0)
445 return QUEUED_COPY (x);
446 /* The increment has happened but we haven't set up a pre-increment copy.
447 Set one up now, and use it. */
448 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
449 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
450 QUEUED_INSN (x));
451 return QUEUED_COPY (x);
452 }
453
454 /* Return nonzero if X contains a QUEUED expression:
455 if it contains anything that will be altered by a queued increment.
456 We handle only combinations of MEM, PLUS, MINUS and MULT operators
457 since memory addresses generally contain only those. */
458
459 int
460 queued_subexp_p (x)
461 rtx x;
462 {
463 register enum rtx_code code = GET_CODE (x);
464 switch (code)
465 {
466 case QUEUED:
467 return 1;
468 case MEM:
469 return queued_subexp_p (XEXP (x, 0));
470 case MULT:
471 case PLUS:
472 case MINUS:
473 return (queued_subexp_p (XEXP (x, 0))
474 || queued_subexp_p (XEXP (x, 1)));
475 default:
476 return 0;
477 }
478 }
479
480 /* Perform all the pending incrementations. */
481
482 void
483 emit_queue ()
484 {
485 register rtx p;
486 while ((p = pending_chain))
487 {
488 rtx body = QUEUED_BODY (p);
489
490 if (GET_CODE (body) == SEQUENCE)
491 {
492 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
493 emit_insn (QUEUED_BODY (p));
494 }
495 else
496 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
497 pending_chain = QUEUED_NEXT (p);
498 }
499 }
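/* A minimal sketch of how these routines cooperate when expanding a
   post-increment (the real expansion lives in expand_increment; VAR and
   the increment body below are placeholders):

       rtx q = enqueue_insn (var, gen_move_insn (var,
                                                 plus_constant (var, 1)));
       ... use protect_from_queue (q, 0) wherever the pre-increment
           value of VAR is required ...
       emit_queue ();

   Any rtx that might contain a QUEUED must pass through
   protect_from_queue before being placed in an instruction;
   queued_subexp_p tells whether an address needs that treatment.  */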
500 \f
501 /* Copy data from FROM to TO, where the machine modes are not the same.
502 Both modes may be integer, or both may be floating.
503 UNSIGNEDP should be nonzero if FROM is an unsigned type.
504 This causes zero-extension instead of sign-extension. */
505
506 void
507 convert_move (to, from, unsignedp)
508 register rtx to, from;
509 int unsignedp;
510 {
511 enum machine_mode to_mode = GET_MODE (to);
512 enum machine_mode from_mode = GET_MODE (from);
513 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
514 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
515 enum insn_code code;
516 rtx libcall;
517
518 /* rtx code for making an equivalent value. */
519 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
520
521 to = protect_from_queue (to, 1);
522 from = protect_from_queue (from, 0);
523
524 if (to_real != from_real)
525 abort ();
526
527 /* If FROM is a SUBREG that indicates that we have already done at least
528 the required extension, strip it. We don't handle such SUBREGs as
529 TO here. */
530
531 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
532 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
533 >= GET_MODE_SIZE (to_mode))
534 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
535 from = gen_lowpart (to_mode, from), from_mode = to_mode;
536
537 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
538 abort ();
539
540 if (to_mode == from_mode
541 || (from_mode == VOIDmode && CONSTANT_P (from)))
542 {
543 emit_move_insn (to, from);
544 return;
545 }
546
547 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
548 {
549 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
550 abort ();
551
552 if (VECTOR_MODE_P (to_mode))
553 from = gen_rtx_SUBREG (to_mode, from, 0);
554 else
555 to = gen_rtx_SUBREG (from_mode, to, 0);
556
557 emit_move_insn (to, from);
558 return;
559 }
560
561 if (to_real != from_real)
562 abort ();
563
564 if (to_real)
565 {
566 rtx value;
567
568 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
569 {
570 /* Try converting directly if the insn is supported. */
571 if ((code = can_extend_p (to_mode, from_mode, 0))
572 != CODE_FOR_nothing)
573 {
574 emit_unop_insn (code, to, from, UNKNOWN);
575 return;
576 }
577 }
578
579 #ifdef HAVE_trunchfqf2
580 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
581 {
582 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
583 return;
584 }
585 #endif
586 #ifdef HAVE_trunctqfqf2
587 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
588 {
589 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
590 return;
591 }
592 #endif
593 #ifdef HAVE_truncsfqf2
594 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
595 {
596 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
597 return;
598 }
599 #endif
600 #ifdef HAVE_truncdfqf2
601 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
602 {
603 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
604 return;
605 }
606 #endif
607 #ifdef HAVE_truncxfqf2
608 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
609 {
610 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
611 return;
612 }
613 #endif
614 #ifdef HAVE_trunctfqf2
615 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
616 {
617 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
618 return;
619 }
620 #endif
621
622 #ifdef HAVE_trunctqfhf2
623 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
624 {
625 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
626 return;
627 }
628 #endif
629 #ifdef HAVE_truncsfhf2
630 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
631 {
632 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
633 return;
634 }
635 #endif
636 #ifdef HAVE_truncdfhf2
637 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
638 {
639 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
640 return;
641 }
642 #endif
643 #ifdef HAVE_truncxfhf2
644 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
645 {
646 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
647 return;
648 }
649 #endif
650 #ifdef HAVE_trunctfhf2
651 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
652 {
653 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
654 return;
655 }
656 #endif
657
658 #ifdef HAVE_truncsftqf2
659 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
660 {
661 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
662 return;
663 }
664 #endif
665 #ifdef HAVE_truncdftqf2
666 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
667 {
668 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
669 return;
670 }
671 #endif
672 #ifdef HAVE_truncxftqf2
673 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
674 {
675 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
676 return;
677 }
678 #endif
679 #ifdef HAVE_trunctftqf2
680 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
681 {
682 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
683 return;
684 }
685 #endif
686
687 #ifdef HAVE_truncdfsf2
688 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
689 {
690 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694 #ifdef HAVE_truncxfsf2
695 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
696 {
697 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
698 return;
699 }
700 #endif
701 #ifdef HAVE_trunctfsf2
702 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
703 {
704 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
705 return;
706 }
707 #endif
708 #ifdef HAVE_truncxfdf2
709 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
710 {
711 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
712 return;
713 }
714 #endif
715 #ifdef HAVE_trunctfdf2
716 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
717 {
718 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
719 return;
720 }
721 #endif
722
723 libcall = (rtx) 0;
724 switch (from_mode)
725 {
726 case SFmode:
727 switch (to_mode)
728 {
729 case DFmode:
730 libcall = extendsfdf2_libfunc;
731 break;
732
733 case XFmode:
734 libcall = extendsfxf2_libfunc;
735 break;
736
737 case TFmode:
738 libcall = extendsftf2_libfunc;
739 break;
740
741 default:
742 break;
743 }
744 break;
745
746 case DFmode:
747 switch (to_mode)
748 {
749 case SFmode:
750 libcall = truncdfsf2_libfunc;
751 break;
752
753 case XFmode:
754 libcall = extenddfxf2_libfunc;
755 break;
756
757 case TFmode:
758 libcall = extenddftf2_libfunc;
759 break;
760
761 default:
762 break;
763 }
764 break;
765
766 case XFmode:
767 switch (to_mode)
768 {
769 case SFmode:
770 libcall = truncxfsf2_libfunc;
771 break;
772
773 case DFmode:
774 libcall = truncxfdf2_libfunc;
775 break;
776
777 default:
778 break;
779 }
780 break;
781
782 case TFmode:
783 switch (to_mode)
784 {
785 case SFmode:
786 libcall = trunctfsf2_libfunc;
787 break;
788
789 case DFmode:
790 libcall = trunctfdf2_libfunc;
791 break;
792
793 default:
794 break;
795 }
796 break;
797
798 default:
799 break;
800 }
801
802 if (libcall == (rtx) 0)
803 /* This conversion is not implemented yet. */
804 abort ();
805
806 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
807 1, from, from_mode);
808 emit_move_insn (to, value);
809 return;
810 }
811
812 /* Now both modes are integers. */
813
814 /* Handle expanding beyond a word. */
815 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
816 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
817 {
818 rtx insns;
819 rtx lowpart;
820 rtx fill_value;
821 rtx lowfrom;
822 int i;
823 enum machine_mode lowpart_mode;
824 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
825
826 /* Try converting directly if the insn is supported. */
827 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
828 != CODE_FOR_nothing)
829 {
830 /* If FROM is a SUBREG, put it into a register. Do this
831 so that we always generate the same set of insns for
832 better cse'ing; if an intermediate assignment occurred,
833 we won't be doing the operation directly on the SUBREG. */
834 if (optimize > 0 && GET_CODE (from) == SUBREG)
835 from = force_reg (from_mode, from);
836 emit_unop_insn (code, to, from, equiv_code);
837 return;
838 }
839 /* Next, try converting via full word. */
840 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
841 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
842 != CODE_FOR_nothing))
843 {
844 if (GET_CODE (to) == REG)
845 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
846 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
847 emit_unop_insn (code, to,
848 gen_lowpart (word_mode, to), equiv_code);
849 return;
850 }
851
852 /* No special multiword conversion insn; do it by hand. */
853 start_sequence ();
854
855 /* Since we will turn this into a no conflict block, we must ensure
856 that the source does not overlap the target. */
857
858 if (reg_overlap_mentioned_p (to, from))
859 from = force_reg (from_mode, from);
860
861 /* Get a copy of FROM widened to a word, if necessary. */
862 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
863 lowpart_mode = word_mode;
864 else
865 lowpart_mode = from_mode;
866
867 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
868
869 lowpart = gen_lowpart (lowpart_mode, to);
870 emit_move_insn (lowpart, lowfrom);
871
872 /* Compute the value to put in each remaining word. */
873 if (unsignedp)
874 fill_value = const0_rtx;
875 else
876 {
877 #ifdef HAVE_slt
878 if (HAVE_slt
879 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
880 && STORE_FLAG_VALUE == -1)
881 {
882 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
883 lowpart_mode, 0, 0);
884 fill_value = gen_reg_rtx (word_mode);
885 emit_insn (gen_slt (fill_value));
886 }
887 else
888 #endif
889 {
890 fill_value
891 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
892 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
893 NULL_RTX, 0);
894 fill_value = convert_to_mode (word_mode, fill_value, 1);
895 }
896 }
897
898 /* Fill the remaining words. */
899 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
900 {
901 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
902 rtx subword = operand_subword (to, index, 1, to_mode);
903
904 if (subword == 0)
905 abort ();
906
907 if (fill_value != subword)
908 emit_move_insn (subword, fill_value);
909 }
910
911 insns = get_insns ();
912 end_sequence ();
913
914 emit_no_conflict_block (insns, to, from, NULL_RTX,
915 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
916 return;
917 }
918
919 /* Truncating multi-word to a word or less. */
920 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
921 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
922 {
923 if (!((GET_CODE (from) == MEM
924 && ! MEM_VOLATILE_P (from)
925 && direct_load[(int) to_mode]
926 && ! mode_dependent_address_p (XEXP (from, 0)))
927 || GET_CODE (from) == REG
928 || GET_CODE (from) == SUBREG))
929 from = force_reg (from_mode, from);
930 convert_move (to, gen_lowpart (word_mode, from), 0);
931 return;
932 }
933
934 /* Handle pointer conversion. */ /* SPEE 900220. */
935 if (to_mode == PQImode)
936 {
937 if (from_mode != QImode)
938 from = convert_to_mode (QImode, from, unsignedp);
939
940 #ifdef HAVE_truncqipqi2
941 if (HAVE_truncqipqi2)
942 {
943 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
944 return;
945 }
946 #endif /* HAVE_truncqipqi2 */
947 abort ();
948 }
949
950 if (from_mode == PQImode)
951 {
952 if (to_mode != QImode)
953 {
954 from = convert_to_mode (QImode, from, unsignedp);
955 from_mode = QImode;
956 }
957 else
958 {
959 #ifdef HAVE_extendpqiqi2
960 if (HAVE_extendpqiqi2)
961 {
962 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
963 return;
964 }
965 #endif /* HAVE_extendpqiqi2 */
966 abort ();
967 }
968 }
969
970 if (to_mode == PSImode)
971 {
972 if (from_mode != SImode)
973 from = convert_to_mode (SImode, from, unsignedp);
974
975 #ifdef HAVE_truncsipsi2
976 if (HAVE_truncsipsi2)
977 {
978 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
979 return;
980 }
981 #endif /* HAVE_truncsipsi2 */
982 abort ();
983 }
984
985 if (from_mode == PSImode)
986 {
987 if (to_mode != SImode)
988 {
989 from = convert_to_mode (SImode, from, unsignedp);
990 from_mode = SImode;
991 }
992 else
993 {
994 #ifdef HAVE_extendpsisi2
995 if (HAVE_extendpsisi2)
996 {
997 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
998 return;
999 }
1000 #endif /* HAVE_extendpsisi2 */
1001 abort ();
1002 }
1003 }
1004
1005 if (to_mode == PDImode)
1006 {
1007 if (from_mode != DImode)
1008 from = convert_to_mode (DImode, from, unsignedp);
1009
1010 #ifdef HAVE_truncdipdi2
1011 if (HAVE_truncdipdi2)
1012 {
1013 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1014 return;
1015 }
1016 #endif /* HAVE_truncdipdi2 */
1017 abort ();
1018 }
1019
1020 if (from_mode == PDImode)
1021 {
1022 if (to_mode != DImode)
1023 {
1024 from = convert_to_mode (DImode, from, unsignedp);
1025 from_mode = DImode;
1026 }
1027 else
1028 {
1029 #ifdef HAVE_extendpdidi2
1030 if (HAVE_extendpdidi2)
1031 {
1032 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1033 return;
1034 }
1035 #endif /* HAVE_extendpdidi2 */
1036 abort ();
1037 }
1038 }
1039
1040 /* Now follow all the conversions between integers
1041 no more than a word long. */
1042
1043 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1044 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1045 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1046 GET_MODE_BITSIZE (from_mode)))
1047 {
1048 if (!((GET_CODE (from) == MEM
1049 && ! MEM_VOLATILE_P (from)
1050 && direct_load[(int) to_mode]
1051 && ! mode_dependent_address_p (XEXP (from, 0)))
1052 || GET_CODE (from) == REG
1053 || GET_CODE (from) == SUBREG))
1054 from = force_reg (from_mode, from);
1055 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1056 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1057 from = copy_to_reg (from);
1058 emit_move_insn (to, gen_lowpart (to_mode, from));
1059 return;
1060 }
1061
1062 /* Handle extension. */
1063 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1064 {
1065 /* Convert directly if that works. */
1066 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1067 != CODE_FOR_nothing)
1068 {
1069 emit_unop_insn (code, to, from, equiv_code);
1070 return;
1071 }
1072 else
1073 {
1074 enum machine_mode intermediate;
1075 rtx tmp;
1076 tree shift_amount;
1077
1078 /* Search for a mode to convert via. */
1079 for (intermediate = from_mode; intermediate != VOIDmode;
1080 intermediate = GET_MODE_WIDER_MODE (intermediate))
1081 if (((can_extend_p (to_mode, intermediate, unsignedp)
1082 != CODE_FOR_nothing)
1083 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1084 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1085 GET_MODE_BITSIZE (intermediate))))
1086 && (can_extend_p (intermediate, from_mode, unsignedp)
1087 != CODE_FOR_nothing))
1088 {
1089 convert_move (to, convert_to_mode (intermediate, from,
1090 unsignedp), unsignedp);
1091 return;
1092 }
1093
1094 /* No suitable intermediate mode.
1095 Generate what we need with shifts. */
1096 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1097 - GET_MODE_BITSIZE (from_mode), 0);
1098 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1099 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1100 to, unsignedp);
1101 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1102 to, unsignedp);
1103 if (tmp != to)
1104 emit_move_insn (to, tmp);
1105 return;
1106 }
1107 }
1108
1109 /* Support special truncate insns for certain modes. */
1110
1111 if (from_mode == DImode && to_mode == SImode)
1112 {
1113 #ifdef HAVE_truncdisi2
1114 if (HAVE_truncdisi2)
1115 {
1116 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1117 return;
1118 }
1119 #endif
1120 convert_move (to, force_reg (from_mode, from), unsignedp);
1121 return;
1122 }
1123
1124 if (from_mode == DImode && to_mode == HImode)
1125 {
1126 #ifdef HAVE_truncdihi2
1127 if (HAVE_truncdihi2)
1128 {
1129 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1130 return;
1131 }
1132 #endif
1133 convert_move (to, force_reg (from_mode, from), unsignedp);
1134 return;
1135 }
1136
1137 if (from_mode == DImode && to_mode == QImode)
1138 {
1139 #ifdef HAVE_truncdiqi2
1140 if (HAVE_truncdiqi2)
1141 {
1142 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1143 return;
1144 }
1145 #endif
1146 convert_move (to, force_reg (from_mode, from), unsignedp);
1147 return;
1148 }
1149
1150 if (from_mode == SImode && to_mode == HImode)
1151 {
1152 #ifdef HAVE_truncsihi2
1153 if (HAVE_truncsihi2)
1154 {
1155 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1156 return;
1157 }
1158 #endif
1159 convert_move (to, force_reg (from_mode, from), unsignedp);
1160 return;
1161 }
1162
1163 if (from_mode == SImode && to_mode == QImode)
1164 {
1165 #ifdef HAVE_truncsiqi2
1166 if (HAVE_truncsiqi2)
1167 {
1168 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1169 return;
1170 }
1171 #endif
1172 convert_move (to, force_reg (from_mode, from), unsignedp);
1173 return;
1174 }
1175
1176 if (from_mode == HImode && to_mode == QImode)
1177 {
1178 #ifdef HAVE_trunchiqi2
1179 if (HAVE_trunchiqi2)
1180 {
1181 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1182 return;
1183 }
1184 #endif
1185 convert_move (to, force_reg (from_mode, from), unsignedp);
1186 return;
1187 }
1188
1189 if (from_mode == TImode && to_mode == DImode)
1190 {
1191 #ifdef HAVE_trunctidi2
1192 if (HAVE_trunctidi2)
1193 {
1194 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1195 return;
1196 }
1197 #endif
1198 convert_move (to, force_reg (from_mode, from), unsignedp);
1199 return;
1200 }
1201
1202 if (from_mode == TImode && to_mode == SImode)
1203 {
1204 #ifdef HAVE_trunctisi2
1205 if (HAVE_trunctisi2)
1206 {
1207 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1208 return;
1209 }
1210 #endif
1211 convert_move (to, force_reg (from_mode, from), unsignedp);
1212 return;
1213 }
1214
1215 if (from_mode == TImode && to_mode == HImode)
1216 {
1217 #ifdef HAVE_trunctihi2
1218 if (HAVE_trunctihi2)
1219 {
1220 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1221 return;
1222 }
1223 #endif
1224 convert_move (to, force_reg (from_mode, from), unsignedp);
1225 return;
1226 }
1227
1228 if (from_mode == TImode && to_mode == QImode)
1229 {
1230 #ifdef HAVE_trunctiqi2
1231 if (HAVE_trunctiqi2)
1232 {
1233 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1234 return;
1235 }
1236 #endif
1237 convert_move (to, force_reg (from_mode, from), unsignedp);
1238 return;
1239 }
1240
1241 /* Handle truncation of volatile memrefs, and so on;
1242 the things that couldn't be truncated directly,
1243 and for which there was no special instruction. */
1244 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1245 {
1246 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1247 emit_move_insn (to, temp);
1248 return;
1249 }
1250
1251 /* Mode combination is not recognized. */
1252 abort ();
1253 }
1254
1255 /* Return an rtx for a value that would result
1256 from converting X to mode MODE.
1257 Both X and MODE may be floating, or both integer.
1258 UNSIGNEDP is nonzero if X is an unsigned value.
1259 This can be done by referring to a part of X in place
1260 or by copying to a new temporary with conversion.
1261
1262 This function *must not* call protect_from_queue
1263 except when putting X into an insn (in which case convert_move does it). */
1264
1265 rtx
1266 convert_to_mode (mode, x, unsignedp)
1267 enum machine_mode mode;
1268 rtx x;
1269 int unsignedp;
1270 {
1271 return convert_modes (mode, VOIDmode, x, unsignedp);
1272 }
1273
1274 /* Return an rtx for a value that would result
1275 from converting X from mode OLDMODE to mode MODE.
1276 Both modes may be floating, or both integer.
1277 UNSIGNEDP is nonzero if X is an unsigned value.
1278
1279 This can be done by referring to a part of X in place
1280 or by copying to a new temporary with conversion.
1281
1282 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1283
1284 This function *must not* call protect_from_queue
1285 except when putting X into an insn (in which case convert_move does it). */
1286
1287 rtx
1288 convert_modes (mode, oldmode, x, unsignedp)
1289 enum machine_mode mode, oldmode;
1290 rtx x;
1291 int unsignedp;
1292 {
1293 register rtx temp;
1294
1295 /* If FROM is a SUBREG that indicates that we have already done at least
1296 the required extension, strip it. */
1297
1298 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1299 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1300 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1301 x = gen_lowpart (mode, x);
1302
1303 if (GET_MODE (x) != VOIDmode)
1304 oldmode = GET_MODE (x);
1305
1306 if (mode == oldmode)
1307 return x;
1308
1309 /* There is one case that we must handle specially: If we are converting
1310 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1311 we are to interpret the constant as unsigned, gen_lowpart will do
1312 the wrong if the constant appears negative. What we want to do is
1313 make the high-order word of the constant zero, not all ones. */
1314
1315 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1316 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1317 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1318 {
1319 HOST_WIDE_INT val = INTVAL (x);
1320
1321 if (oldmode != VOIDmode
1322 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1323 {
1324 int width = GET_MODE_BITSIZE (oldmode);
1325
1326 /* We need to zero extend VAL. */
1327 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1328 }
1329
1330 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1331 }
1332
1333 /* We can do this with a gen_lowpart if both desired and current modes
1334 are integer, and this is either a constant integer, a register, or a
1335 non-volatile MEM. Except for the constant case where MODE is no
1336 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1337
1338 if ((GET_CODE (x) == CONST_INT
1339 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1340 || (GET_MODE_CLASS (mode) == MODE_INT
1341 && GET_MODE_CLASS (oldmode) == MODE_INT
1342 && (GET_CODE (x) == CONST_DOUBLE
1343 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1344 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1345 && direct_load[(int) mode])
1346 || (GET_CODE (x) == REG
1347 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1348 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1349 {
1350 /* ?? If we don't know OLDMODE, we have to assume here that
1351 X does not need sign- or zero-extension. This may not be
1352 the case, but it's the best we can do. */
1353 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1354 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1355 {
1356 HOST_WIDE_INT val = INTVAL (x);
1357 int width = GET_MODE_BITSIZE (oldmode);
1358
1359 /* We must sign or zero-extend in this case. Start by
1360 zero-extending, then sign extend if we need to. */
1361 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1362 if (! unsignedp
1363 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1364 val |= (HOST_WIDE_INT) (-1) << width;
1365
1366 return GEN_INT (val);
1367 }
1368
1369 return gen_lowpart (mode, x);
1370 }
1371
1372 temp = gen_reg_rtx (mode);
1373 convert_move (temp, x, unsignedp);
1374 return temp;
1375 }
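/* For example, to narrow a DImode value X to SImode, as when a widening
   multiply is turned back into a single-word multiply (X and UNSIGNEDP
   are placeholders in this sketch):

       rtx narrow = convert_modes (SImode, DImode, x, unsignedp);

   The result may be X itself, a truncated constant, a lowpart
   reference, or a fresh SImode pseudo filled in by convert_move, so it
   must not be assumed to be a new register.  */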
1376 \f
1377 /* This macro is used to determine what the largest unit size that
1378 move_by_pieces can use is. */
1379
1380 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1381 move efficiently, as opposed to MOVE_MAX which is the maximum
1382 number of bytes we can move with a single instruction. */
1383
1384 #ifndef MOVE_MAX_PIECES
1385 #define MOVE_MAX_PIECES MOVE_MAX
1386 #endif
1387
1388 /* Generate several move instructions to copy LEN bytes
1389 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1390 The caller must pass FROM and TO
1391 through protect_from_queue before calling.
1392 ALIGN is the maximum alignment we can assume, in bits. */
1393
1394 void
1395 move_by_pieces (to, from, len, align)
1396 rtx to, from;
1397 unsigned HOST_WIDE_INT len;
1398 unsigned int align;
1399 {
1400 struct move_by_pieces data;
1401 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1402 unsigned int max_size = MOVE_MAX_PIECES + 1;
1403 enum machine_mode mode = VOIDmode, tmode;
1404 enum insn_code icode;
1405
1406 data.offset = 0;
1407 data.to_addr = to_addr;
1408 data.from_addr = from_addr;
1409 data.to = to;
1410 data.from = from;
1411 data.autinc_to
1412 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1413 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1414 data.autinc_from
1415 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1416 || GET_CODE (from_addr) == POST_INC
1417 || GET_CODE (from_addr) == POST_DEC);
1418
1419 data.explicit_inc_from = 0;
1420 data.explicit_inc_to = 0;
1421 data.reverse
1422 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1423 if (data.reverse) data.offset = len;
1424 data.len = len;
1425
1426 /* If copying requires more than two move insns,
1427 copy addresses to registers (to make displacements shorter)
1428 and use post-increment if available. */
1429 if (!(data.autinc_from && data.autinc_to)
1430 && move_by_pieces_ninsns (len, align) > 2)
1431 {
1432 /* Find the mode of the largest move... */
1433 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1434 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1435 if (GET_MODE_SIZE (tmode) < max_size)
1436 mode = tmode;
1437
1438 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1439 {
1440 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1441 data.autinc_from = 1;
1442 data.explicit_inc_from = -1;
1443 }
1444 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1445 {
1446 data.from_addr = copy_addr_to_reg (from_addr);
1447 data.autinc_from = 1;
1448 data.explicit_inc_from = 1;
1449 }
1450 if (!data.autinc_from && CONSTANT_P (from_addr))
1451 data.from_addr = copy_addr_to_reg (from_addr);
1452 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1453 {
1454 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1455 data.autinc_to = 1;
1456 data.explicit_inc_to = -1;
1457 }
1458 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1459 {
1460 data.to_addr = copy_addr_to_reg (to_addr);
1461 data.autinc_to = 1;
1462 data.explicit_inc_to = 1;
1463 }
1464 if (!data.autinc_to && CONSTANT_P (to_addr))
1465 data.to_addr = copy_addr_to_reg (to_addr);
1466 }
1467
1468 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1469 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1470 align = MOVE_MAX * BITS_PER_UNIT;
1471
1472 /* First move what we can in the largest integer mode, then go to
1473 successively smaller modes. */
1474
1475 while (max_size > 1)
1476 {
1477 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1478 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1479 if (GET_MODE_SIZE (tmode) < max_size)
1480 mode = tmode;
1481
1482 if (mode == VOIDmode)
1483 break;
1484
1485 icode = mov_optab->handlers[(int) mode].insn_code;
1486 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1487 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1488
1489 max_size = GET_MODE_SIZE (mode);
1490 }
1491
1492 /* The code above should have handled everything. */
1493 if (data.len > 0)
1494 abort ();
1495 }
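/* emit_block_move below is the typical caller; it applies
   protect_from_queue (as required above) and then does, in essence:

       if (GET_CODE (size) == CONST_INT
           && MOVE_BY_PIECES_P (INTVAL (size), align))
         move_by_pieces (x, y, INTVAL (size), align);

   falling back to a movstrM pattern or a library call otherwise.  */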
1496
1497 /* Return number of insns required to move L bytes by pieces.
1498 ALIGN (in bits) is the maximum alignment we can assume. */
1499
1500 static unsigned HOST_WIDE_INT
1501 move_by_pieces_ninsns (l, align)
1502 unsigned HOST_WIDE_INT l;
1503 unsigned int align;
1504 {
1505 unsigned HOST_WIDE_INT n_insns = 0;
1506 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1507
1508 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1509 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1510 align = MOVE_MAX * BITS_PER_UNIT;
1511
1512 while (max_size > 1)
1513 {
1514 enum machine_mode mode = VOIDmode, tmode;
1515 enum insn_code icode;
1516
1517 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1518 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1519 if (GET_MODE_SIZE (tmode) < max_size)
1520 mode = tmode;
1521
1522 if (mode == VOIDmode)
1523 break;
1524
1525 icode = mov_optab->handlers[(int) mode].insn_code;
1526 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1527 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1528
1529 max_size = GET_MODE_SIZE (mode);
1530 }
1531
1532 return n_insns;
1533 }
1534
1535 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1536 with move instructions for mode MODE. GENFUN is the gen_... function
1537 to make a move insn for that mode. DATA has all the other info. */
1538
1539 static void
1540 move_by_pieces_1 (genfun, mode, data)
1541 rtx (*genfun) PARAMS ((rtx, ...));
1542 enum machine_mode mode;
1543 struct move_by_pieces *data;
1544 {
1545 unsigned int size = GET_MODE_SIZE (mode);
1546 rtx to1, from1;
1547
1548 while (data->len >= size)
1549 {
1550 if (data->reverse)
1551 data->offset -= size;
1552
1553 if (data->autinc_to)
1554 {
1555 to1 = gen_rtx_MEM (mode, data->to_addr);
1556 MEM_COPY_ATTRIBUTES (to1, data->to);
1557 }
1558 else
1559 to1 = change_address (data->to, mode,
1560 plus_constant (data->to_addr, data->offset));
1561
1562 if (data->autinc_from)
1563 {
1564 from1 = gen_rtx_MEM (mode, data->from_addr);
1565 MEM_COPY_ATTRIBUTES (from1, data->from);
1566 }
1567 else
1568 from1 = change_address (data->from, mode,
1569 plus_constant (data->from_addr, data->offset));
1570
1571 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1572 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1573 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1574 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1575
1576 emit_insn ((*genfun) (to1, from1));
1577
1578 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1579 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1580 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1581 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1582
1583 if (! data->reverse)
1584 data->offset += size;
1585
1586 data->len -= size;
1587 }
1588 }
1589 \f
1590 /* Emit code to move a block Y to a block X.
1591 This may be done with string-move instructions,
1592 with multiple scalar move instructions, or with a library call.
1593
1594 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1595 with mode BLKmode.
1596 SIZE is an rtx that says how long they are.
1597 ALIGN is the maximum alignment, in bits, we can assume they have.
1598
1599 Return the address of the new block, if memcpy is called and returns it,
1600 0 otherwise. */
1601
1602 rtx
1603 emit_block_move (x, y, size, align)
1604 rtx x, y;
1605 rtx size;
1606 unsigned int align;
1607 {
1608 rtx retval = 0;
1609 #ifdef TARGET_MEM_FUNCTIONS
1610 static tree fn;
1611 tree call_expr, arg_list;
1612 #endif
1613
1614 if (GET_MODE (x) != BLKmode)
1615 abort ();
1616
1617 if (GET_MODE (y) != BLKmode)
1618 abort ();
1619
1620 x = protect_from_queue (x, 1);
1621 y = protect_from_queue (y, 0);
1622 size = protect_from_queue (size, 0);
1623
1624 if (GET_CODE (x) != MEM)
1625 abort ();
1626 if (GET_CODE (y) != MEM)
1627 abort ();
1628 if (size == 0)
1629 abort ();
1630
1631 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1632 move_by_pieces (x, y, INTVAL (size), align);
1633 else
1634 {
1635 /* Try the most limited insn first, because there's no point
1636 including more than one in the machine description unless
1637 the more limited one has some advantage. */
1638
1639 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1640 enum machine_mode mode;
1641
1642 /* Since this is a move insn, we don't care about volatility. */
1643 volatile_ok = 1;
1644
1645 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1646 mode = GET_MODE_WIDER_MODE (mode))
1647 {
1648 enum insn_code code = movstr_optab[(int) mode];
1649 insn_operand_predicate_fn pred;
1650
1651 if (code != CODE_FOR_nothing
1652 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1653 here because if SIZE is less than the mode mask, as it is
1654 returned by the macro, it will definitely be less than the
1655 actual mode mask. */
1656 && ((GET_CODE (size) == CONST_INT
1657 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1658 <= (GET_MODE_MASK (mode) >> 1)))
1659 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1660 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1661 || (*pred) (x, BLKmode))
1662 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1663 || (*pred) (y, BLKmode))
1664 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1665 || (*pred) (opalign, VOIDmode)))
1666 {
1667 rtx op2;
1668 rtx last = get_last_insn ();
1669 rtx pat;
1670
1671 op2 = convert_to_mode (mode, size, 1);
1672 pred = insn_data[(int) code].operand[2].predicate;
1673 if (pred != 0 && ! (*pred) (op2, mode))
1674 op2 = copy_to_mode_reg (mode, op2);
1675
1676 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1677 if (pat)
1678 {
1679 emit_insn (pat);
1680 volatile_ok = 0;
1681 return 0;
1682 }
1683 else
1684 delete_insns_since (last);
1685 }
1686 }
1687
1688 volatile_ok = 0;
1689
1690 /* X, Y, or SIZE may have been passed through protect_from_queue.
1691
1692 It is unsafe to save the value generated by protect_from_queue
1693 and reuse it later. Consider what happens if emit_queue is
1694 called before the return value from protect_from_queue is used.
1695
1696 Expansion of the CALL_EXPR below will call emit_queue before
1697 we are finished emitting RTL for argument setup. So if we are
1698 not careful we could get the wrong value for an argument.
1699
1700 To avoid this problem we go ahead and emit code to copy X, Y &
1701 SIZE into new pseudos. We can then place those new pseudos
1702 into an RTL_EXPR and use them later, even after a call to
1703 emit_queue.
1704
1705 Note this is not strictly needed for library calls since they
1706 do not call emit_queue before loading their arguments. However,
1707 we may need to have library calls call emit_queue in the future
1708 since failing to do so could cause problems for targets which
1709 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1710 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1711 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1712
1713 #ifdef TARGET_MEM_FUNCTIONS
1714 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1715 #else
1716 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1717 TREE_UNSIGNED (integer_type_node));
1718 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1719 #endif
1720
1721 #ifdef TARGET_MEM_FUNCTIONS
1722 /* It is incorrect to use the libcall calling conventions to call
1723 memcpy in this context.
1724
1725 This could be a user call to memcpy and the user may wish to
1726 examine the return value from memcpy.
1727
1728 For targets where libcalls and normal calls have different conventions
1729 for returning pointers, we could end up generating incorrect code.
1730
1731 So instead of using a libcall sequence we build up a suitable
1732 CALL_EXPR and expand the call in the normal fashion. */
1733 if (fn == NULL_TREE)
1734 {
1735 tree fntype;
1736
1737 /* This was copied from except.c, I don't know if all this is
1738 necessary in this context or not. */
1739 fn = get_identifier ("memcpy");
1740 push_obstacks_nochange ();
1741 end_temporary_allocation ();
1742 fntype = build_pointer_type (void_type_node);
1743 fntype = build_function_type (fntype, NULL_TREE);
1744 fn = build_decl (FUNCTION_DECL, fn, fntype);
1745 ggc_add_tree_root (&fn, 1);
1746 DECL_EXTERNAL (fn) = 1;
1747 TREE_PUBLIC (fn) = 1;
1748 DECL_ARTIFICIAL (fn) = 1;
1749 make_decl_rtl (fn, NULL_PTR, 1);
1750 assemble_external (fn);
1751 pop_obstacks ();
1752 }
1753
1754 /* We need to make an argument list for the function call.
1755
1756 memcpy has three arguments, the first two are void * addresses and
1757 the last is a size_t byte count for the copy. */
1758 arg_list
1759 = build_tree_list (NULL_TREE,
1760 make_tree (build_pointer_type (void_type_node), x));
1761 TREE_CHAIN (arg_list)
1762 = build_tree_list (NULL_TREE,
1763 make_tree (build_pointer_type (void_type_node), y));
1764 TREE_CHAIN (TREE_CHAIN (arg_list))
1765 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1766 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1767
1768 /* Now we have to build up the CALL_EXPR itself. */
1769 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1770 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1771 call_expr, arg_list, NULL_TREE);
1772 TREE_SIDE_EFFECTS (call_expr) = 1;
1773
1774 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1775 #else
1776 emit_library_call (bcopy_libfunc, 0,
1777 VOIDmode, 3, y, Pmode, x, Pmode,
1778 convert_to_mode (TYPE_MODE (integer_type_node), size,
1779 TREE_UNSIGNED (integer_type_node)),
1780 TYPE_MODE (integer_type_node));
1781 #endif
1782 }
1783
1784 return retval;
1785 }
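/* A typical call copies one BLKmode MEM into another when the length is
   a compile-time constant (TARGET_MEM, SOURCE_MEM, LEN and ALIGN are
   placeholders in this sketch; ALIGN is the shared alignment in bits):

       emit_block_move (target_mem, source_mem, GEN_INT (len), align);

   Small constant sizes go through move_by_pieces; larger ones try the
   movstrM patterns and finally fall back to memcpy (or bcopy when
   TARGET_MEM_FUNCTIONS is not defined), whose return value, if any, is
   what emit_block_move returns.  */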
1786 \f
1787 /* Copy all or part of a value X into registers starting at REGNO.
1788 The number of registers to be filled is NREGS. */
1789
1790 void
1791 move_block_to_reg (regno, x, nregs, mode)
1792 int regno;
1793 rtx x;
1794 int nregs;
1795 enum machine_mode mode;
1796 {
1797 int i;
1798 #ifdef HAVE_load_multiple
1799 rtx pat;
1800 rtx last;
1801 #endif
1802
1803 if (nregs == 0)
1804 return;
1805
1806 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1807 x = validize_mem (force_const_mem (mode, x));
1808
1809 /* See if the machine can do this with a load multiple insn. */
1810 #ifdef HAVE_load_multiple
1811 if (HAVE_load_multiple)
1812 {
1813 last = get_last_insn ();
1814 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1815 GEN_INT (nregs));
1816 if (pat)
1817 {
1818 emit_insn (pat);
1819 return;
1820 }
1821 else
1822 delete_insns_since (last);
1823 }
1824 #endif
1825
1826 for (i = 0; i < nregs; i++)
1827 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1828 operand_subword_force (x, i, mode));
1829 }
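/* For instance, loading a two-word value X into the consecutive hard
   registers beginning at REGNO, as is done when passing a multi-word
   argument in registers (a sketch; X, REGNO and MODE are placeholders):

       move_block_to_reg (regno, x, 2, mode);

   This uses a load_multiple pattern when the target provides one and
   otherwise emits one word_mode move per register.  */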
1830
1831 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1832 The number of registers to be filled is NREGS. SIZE indicates the number
1833 of bytes in the object X. */
1834
1835 void
1836 move_block_from_reg (regno, x, nregs, size)
1837 int regno;
1838 rtx x;
1839 int nregs;
1840 int size;
1841 {
1842 int i;
1843 #ifdef HAVE_store_multiple
1844 rtx pat;
1845 rtx last;
1846 #endif
1847 enum machine_mode mode;
1848
1849 /* If SIZE is that of a mode no bigger than a word, just use that
1850 mode's store operation. */
1851 if (size <= UNITS_PER_WORD
1852 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1853 {
1854 emit_move_insn (change_address (x, mode, NULL),
1855 gen_rtx_REG (mode, regno));
1856 return;
1857 }
1858
1859 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1860 to the left before storing to memory. Note that the previous test
1861 doesn't handle all cases (e.g. SIZE == 3). */
1862 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1863 {
1864 rtx tem = operand_subword (x, 0, 1, BLKmode);
1865 rtx shift;
1866
1867 if (tem == 0)
1868 abort ();
1869
1870 shift = expand_shift (LSHIFT_EXPR, word_mode,
1871 gen_rtx_REG (word_mode, regno),
1872 build_int_2 ((UNITS_PER_WORD - size)
1873 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1874 emit_move_insn (tem, shift);
1875 return;
1876 }
1877
1878 /* See if the machine can do this with a store multiple insn. */
1879 #ifdef HAVE_store_multiple
1880 if (HAVE_store_multiple)
1881 {
1882 last = get_last_insn ();
1883 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1884 GEN_INT (nregs));
1885 if (pat)
1886 {
1887 emit_insn (pat);
1888 return;
1889 }
1890 else
1891 delete_insns_since (last);
1892 }
1893 #endif
1894
1895 for (i = 0; i < nregs; i++)
1896 {
1897 rtx tem = operand_subword (x, i, 1, BLKmode);
1898
1899 if (tem == 0)
1900 abort ();
1901
1902 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1903 }
1904 }
1905
1906 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1907 registers represented by a PARALLEL. SSIZE represents the total size of
1908 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1909 SRC in bits. */
1910 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1911 the balance will be in what would be the low-order memory addresses, i.e.
1912 left justified for big endian, right justified for little endian. This
1913 happens to be true for the targets currently using this support. If this
1914 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1915 would be needed. */
1916
1917 void
1918 emit_group_load (dst, orig_src, ssize, align)
1919 rtx dst, orig_src;
1920 int ssize;
1921 unsigned int align;
1922 {
1923 rtx *tmps, src;
1924 int start, i;
1925
1926 if (GET_CODE (dst) != PARALLEL)
1927 abort ();
1928
1929 /* Check for a NULL entry, used to indicate that the parameter goes
1930 both on the stack and in registers. */
1931 if (XEXP (XVECEXP (dst, 0, 0), 0))
1932 start = 0;
1933 else
1934 start = 1;
1935
1936 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1937
1938 /* If we won't be loading directly from memory, protect the real source
1939 from strange tricks we might play. */
1940 src = orig_src;
1941 if (GET_CODE (src) != MEM)
1942 {
1943 if (GET_MODE (src) == VOIDmode)
1944 src = gen_reg_rtx (GET_MODE (dst));
1945 else
1946 src = gen_reg_rtx (GET_MODE (orig_src));
1947 emit_move_insn (src, orig_src);
1948 }
1949
1950 /* Process the pieces. */
1951 for (i = start; i < XVECLEN (dst, 0); i++)
1952 {
1953 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1954 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1955 unsigned int bytelen = GET_MODE_SIZE (mode);
1956 int shift = 0;
1957
1958 /* Handle trailing fragments that run over the size of the struct. */
1959 if (ssize >= 0 && bytepos + bytelen > ssize)
1960 {
1961 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1962 bytelen = ssize - bytepos;
1963 if (bytelen <= 0)
1964 abort ();
1965 }
1966
1967 /* Optimize the access just a bit. */
1968 if (GET_CODE (src) == MEM
1969 && align >= GET_MODE_ALIGNMENT (mode)
1970 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1971 && bytelen == GET_MODE_SIZE (mode))
1972 {
1973 tmps[i] = gen_reg_rtx (mode);
1974 emit_move_insn (tmps[i],
1975 change_address (src, mode,
1976 plus_constant (XEXP (src, 0),
1977 bytepos)));
1978 }
1979 else if (GET_CODE (src) == CONCAT)
1980 {
1981 if (bytepos == 0
1982 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1983 tmps[i] = XEXP (src, 0);
1984 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1985 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1986 tmps[i] = XEXP (src, 1);
1987 else
1988 abort ();
1989 }
1990 else
1991 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1992 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1993 mode, mode, align, ssize);
1994
1995 if (BYTES_BIG_ENDIAN && shift)
1996 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1997 tmps[i], 0, OPTAB_WIDEN);
1998 }
1999
2000 emit_queue ();
2001
2002 /* Copy the extracted pieces into the proper (probable) hard regs. */
2003 for (i = start; i < XVECLEN (dst, 0); i++)
2004 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2005 }
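
/* Illustrative sketch of the DST argument above (added commentary, not
   original GCC code): a value split between a floating and an integer
   register might be described by a PARALLEL such as

       (parallel [(expr_list (reg:DF 32) (const_int 0))
                  (expr_list (reg:DI 3)  (const_int 8))])

   where each EXPR_LIST pairs a destination register with the byte offset
   it receives, the two fields the loop above reads via
   XEXP (XVECEXP (dst, 0, i), 0) and XEXP (XVECEXP (dst, 0, i), 1).
   The register numbers and modes are invented for the example.  */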
2006
2007 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2008 registers represented by a PARALLEL. SSIZE represents the total size of
2009 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2010
2011 void
2012 emit_group_store (orig_dst, src, ssize, align)
2013 rtx orig_dst, src;
2014 int ssize;
2015 unsigned int align;
2016 {
2017 rtx *tmps, dst;
2018 int start, i;
2019
2020 if (GET_CODE (src) != PARALLEL)
2021 abort ();
2022
2023 /* Check for a NULL entry, used to indicate that the parameter goes
2024 both on the stack and in registers. */
2025 if (XEXP (XVECEXP (src, 0, 0), 0))
2026 start = 0;
2027 else
2028 start = 1;
2029
2030 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2031
2032 /* Copy the (probable) hard regs into pseudos. */
2033 for (i = start; i < XVECLEN (src, 0); i++)
2034 {
2035 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2036 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2037 emit_move_insn (tmps[i], reg);
2038 }
2039 emit_queue ();
2040
2041 /* If we won't be storing directly into memory, protect the real destination
2042 from strange tricks we might play. */
2043 dst = orig_dst;
2044 if (GET_CODE (dst) == PARALLEL)
2045 {
2046 rtx temp;
2047
2048 /* We can get a PARALLEL dst if there is a conditional expression in
2049 a return statement. In that case, the dst and src are the same,
2050 so no action is necessary. */
2051 if (rtx_equal_p (dst, src))
2052 return;
2053
2054 /* It is unclear if we can ever reach here, but we may as well handle
2055 it. Allocate a temporary, and split this into a store/load to/from
2056 the temporary. */
2057
2058 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2059 emit_group_store (temp, src, ssize, align);
2060 emit_group_load (dst, temp, ssize, align);
2061 return;
2062 }
2063 else if (GET_CODE (dst) != MEM)
2064 {
2065 dst = gen_reg_rtx (GET_MODE (orig_dst));
2066 /* Make life a bit easier for combine. */
2067 emit_move_insn (dst, const0_rtx);
2068 }
2069
2070 /* Process the pieces. */
2071 for (i = start; i < XVECLEN (src, 0); i++)
2072 {
2073 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2074 enum machine_mode mode = GET_MODE (tmps[i]);
2075 unsigned int bytelen = GET_MODE_SIZE (mode);
2076
2077 /* Handle trailing fragments that run over the size of the struct. */
2078 if (ssize >= 0 && bytepos + bytelen > ssize)
2079 {
2080 if (BYTES_BIG_ENDIAN)
2081 {
2082 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2083 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2084 tmps[i], 0, OPTAB_WIDEN);
2085 }
2086 bytelen = ssize - bytepos;
2087 }
2088
2089 /* Optimize the access just a bit. */
2090 if (GET_CODE (dst) == MEM
2091 && align >= GET_MODE_ALIGNMENT (mode)
2092 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2093 && bytelen == GET_MODE_SIZE (mode))
2094 emit_move_insn (change_address (dst, mode,
2095 plus_constant (XEXP (dst, 0),
2096 bytepos)),
2097 tmps[i]);
2098 else
2099 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2100 mode, tmps[i], align, ssize);
2101 }
2102
2103 emit_queue ();
2104
2105 /* Copy from the pseudo into the (probable) hard reg. */
2106 if (GET_CODE (dst) == REG)
2107 emit_move_insn (orig_dst, dst);
2108 }
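
/* Illustrative usage sketch (added commentary, not original GCC code):
   storing a value returned in the kind of PARALLEL shown above into a
   16-byte BLKmode stack temporary could look like

       rtx blk = assign_stack_temp (BLKmode, 16, 0);
       emit_group_store (blk, retval_parallel, 16, BITS_PER_WORD);

   RETVAL_PARALLEL and the constants are invented for the example; ALIGN
   is the destination's known alignment in bits.  */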
2109
2110 /* Generate code to copy a BLKmode object of TYPE out of a
2111 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2112 is null, a stack temporary is created. TGTBLK is returned.
2113
2114 The primary purpose of this routine is to handle functions
2115 that return BLKmode structures in registers. Some machines
2116 (the PA for example) want to return all small structures
2117 in registers regardless of the structure's alignment. */
2118
2119 rtx
2120 copy_blkmode_from_reg (tgtblk, srcreg, type)
2121 rtx tgtblk;
2122 rtx srcreg;
2123 tree type;
2124 {
2125 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2126 rtx src = NULL, dst = NULL;
2127 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2128 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2129
2130 if (tgtblk == 0)
2131 {
2132 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2133 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2134 preserve_temp_slots (tgtblk);
2135 }
2136
2137 /* This code assumes srcreg is at least a full word. If it isn't,
2138 copy it into a new pseudo which is a full word. */
2139 if (GET_MODE (srcreg) != BLKmode
2140 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2141 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2142
2143 /* Structures whose size is not a multiple of a word are aligned
2144 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2145 machine, this means we must skip the empty high order bytes when
2146 calculating the bit offset. */
2147 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2148 big_endian_correction
2149 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2150
2151 /* Copy the structure BITSIZE bits at a time.
2152
2153 We could probably emit more efficient code for machines which do not use
2154 strict alignment, but it doesn't seem worth the effort at the current
2155 time. */
2156 for (bitpos = 0, xbitpos = big_endian_correction;
2157 bitpos < bytes * BITS_PER_UNIT;
2158 bitpos += bitsize, xbitpos += bitsize)
2159 {
2160 /* We need a new source operand each time xbitpos is on a
2161 word boundary or when xbitpos == big_endian_correction
2162 (the first time through). */
2163 if (xbitpos % BITS_PER_WORD == 0
2164 || xbitpos == big_endian_correction)
2165 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2166
2167 /* We need a new destination operand each time bitpos is on
2168 a word boundary. */
2169 if (bitpos % BITS_PER_WORD == 0)
2170 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2171
2172 /* Use xbitpos for the source extraction (right justified) and
2173 bitpos for the destination store (left justified). */
2174 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2175 extract_bit_field (src, bitsize,
2176 xbitpos % BITS_PER_WORD, 1,
2177 NULL_RTX, word_mode, word_mode,
2178 bitsize, BITS_PER_WORD),
2179 bitsize, BITS_PER_WORD);
2180 }
2181
2182 return tgtblk;
2183 }
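
/* Worked example of BIG_ENDIAN_CORRECTION above (added commentary, not
   original GCC code): for a 6-byte structure on a 32-bit big-endian
   target,

       bytes % UNITS_PER_WORD  =  6 % 4       =  2
       big_endian_correction   =  32 - 2 * 8  =  16,

   so the source bit positions (XBITPOS) start 16 bits in, skipping the
   empty high-order bytes, while the destination positions (BITPOS)
   still start at zero.  */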
2184
2185 /* Add a USE expression for REG to the (possibly empty) list pointed
2186 to by CALL_FUSAGE. REG must denote a hard register. */
2187
2188 void
2189 use_reg (call_fusage, reg)
2190 rtx *call_fusage, reg;
2191 {
2192 if (GET_CODE (reg) != REG
2193 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2194 abort ();
2195
2196 *call_fusage
2197 = gen_rtx_EXPR_LIST (VOIDmode,
2198 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2199 }
2200
2201 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2202 starting at REGNO. All of these registers must be hard registers. */
2203
2204 void
2205 use_regs (call_fusage, regno, nregs)
2206 rtx *call_fusage;
2207 int regno;
2208 int nregs;
2209 {
2210 int i;
2211
2212 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2213 abort ();
2214
2215 for (i = 0; i < nregs; i++)
2216 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2217 }
2218
2219 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2220 PARALLEL REGS. This is for calls that pass values in multiple
2221 non-contiguous locations. The Irix 6 ABI has examples of this. */
2222
2223 void
2224 use_group_regs (call_fusage, regs)
2225 rtx *call_fusage;
2226 rtx regs;
2227 {
2228 int i;
2229
2230 for (i = 0; i < XVECLEN (regs, 0); i++)
2231 {
2232 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2233
2234 /* A NULL entry means the parameter goes both on the stack and in
2235 registers. This can also be a MEM for targets that pass values
2236 partially on the stack and partially in registers. */
2237 if (reg != 0 && GET_CODE (reg) == REG)
2238 use_reg (call_fusage, reg);
2239 }
2240 }
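
/* Illustrative usage sketch (added commentary, not original GCC code):
   a caller building the function-usage list for a call that passes one
   argument in hard register 4 might do

       rtx call_fusage = 0;
       use_reg (&call_fusage, gen_rtx_REG (SImode, 4));

   leaving CALL_FUSAGE as a one-element EXPR_LIST of USE expressions to
   attach to the call insn (CALL_INSN_FUNCTION_USAGE).  The register
   number and mode are invented for the example.  */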
2241 \f
2242 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2243 rtx with BLKmode). The caller must pass TO through protect_from_queue
2244 before calling. ALIGN is maximum alignment we can assume. */
2245
2246 static void
2247 clear_by_pieces (to, len, align)
2248 rtx to;
2249 unsigned HOST_WIDE_INT len;
2250 unsigned int align;
2251 {
2252 struct clear_by_pieces data;
2253 rtx to_addr = XEXP (to, 0);
2254 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2255 enum machine_mode mode = VOIDmode, tmode;
2256 enum insn_code icode;
2257
2258 data.offset = 0;
2259 data.to_addr = to_addr;
2260 data.to = to;
2261 data.autinc_to
2262 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2263 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2264
2265 data.explicit_inc_to = 0;
2266 data.reverse
2267 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2268 if (data.reverse)
2269 data.offset = len;
2270 data.len = len;
2271
2272 /* If clearing requires more than two move insns,
2273 copy addresses to registers (to make displacements shorter)
2274 and use post-increment if available. */
2275 if (!data.autinc_to
2276 && move_by_pieces_ninsns (len, align) > 2)
2277 {
2278 /* Determine the main mode we'll be using. */
2279 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2280 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2281 if (GET_MODE_SIZE (tmode) < max_size)
2282 mode = tmode;
2283
2284 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2285 {
2286 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2287 data.autinc_to = 1;
2288 data.explicit_inc_to = -1;
2289 }
2290
2291 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
2292 && ! data.autinc_to)
2293 {
2294 data.to_addr = copy_addr_to_reg (to_addr);
2295 data.autinc_to = 1;
2296 data.explicit_inc_to = 1;
2297 }
2298
2299 if ( !data.autinc_to && CONSTANT_P (to_addr))
2300 data.to_addr = copy_addr_to_reg (to_addr);
2301 }
2302
2303 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2304 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2305 align = MOVE_MAX * BITS_PER_UNIT;
2306
2307 /* First move what we can in the largest integer mode, then go to
2308 successively smaller modes. */
2309
2310 while (max_size > 1)
2311 {
2312 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2313 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2314 if (GET_MODE_SIZE (tmode) < max_size)
2315 mode = tmode;
2316
2317 if (mode == VOIDmode)
2318 break;
2319
2320 icode = mov_optab->handlers[(int) mode].insn_code;
2321 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2322 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2323
2324 max_size = GET_MODE_SIZE (mode);
2325 }
2326
2327 /* The code above should have handled everything. */
2328 if (data.len != 0)
2329 abort ();
2330 }
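
/* Worked example of the mode-selection loop above (added commentary, not
   original GCC code): assuming the usual 8/16/32/64-bit integer modes
   and MOVE_MAX_PIECES == 8, MAX_SIZE starts at 9, so DImode is chosen
   first and full 8-byte chunks are cleared while DATA.LEN permits; then
   MAX_SIZE drops to 8 and SImode is tried, then HImode, then QImode,
   until DATA.LEN reaches zero.  Each mode is used only if its mov
   pattern exists and the known alignment allows it.  */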
2331
2332 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2333 with move instructions for mode MODE. GENFUN is the gen_... function
2334 to make a move insn for that mode. DATA has all the other info. */
2335
2336 static void
2337 clear_by_pieces_1 (genfun, mode, data)
2338 rtx (*genfun) PARAMS ((rtx, ...));
2339 enum machine_mode mode;
2340 struct clear_by_pieces *data;
2341 {
2342 unsigned int size = GET_MODE_SIZE (mode);
2343 rtx to1;
2344
2345 while (data->len >= size)
2346 {
2347 if (data->reverse)
2348 data->offset -= size;
2349
2350 if (data->autinc_to)
2351 {
2352 to1 = gen_rtx_MEM (mode, data->to_addr);
2353 MEM_COPY_ATTRIBUTES (to1, data->to);
2354 }
2355 else
2356 to1 = change_address (data->to, mode,
2357 plus_constant (data->to_addr, data->offset));
2358
2359 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2360 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2361
2362 emit_insn ((*genfun) (to1, const0_rtx));
2363
2364 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2365 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2366
2367 if (! data->reverse)
2368 data->offset += size;
2369
2370 data->len -= size;
2371 }
2372 }
2373 \f
2374 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2375 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2376
2377 If we call a function that returns the length of the block, return it. */
2378
2379 rtx
2380 clear_storage (object, size, align)
2381 rtx object;
2382 rtx size;
2383 unsigned int align;
2384 {
2385 #ifdef TARGET_MEM_FUNCTIONS
2386 static tree fn;
2387 tree call_expr, arg_list;
2388 #endif
2389 rtx retval = 0;
2390
2391 if (GET_MODE (object) == BLKmode)
2392 {
2393 object = protect_from_queue (object, 1);
2394 size = protect_from_queue (size, 0);
2395
2396 if (GET_CODE (size) == CONST_INT
2397 && MOVE_BY_PIECES_P (INTVAL (size), align))
2398 clear_by_pieces (object, INTVAL (size), align);
2399 else
2400 {
2401 /* Try the most limited insn first, because there's no point
2402 including more than one in the machine description unless
2403 the more limited one has some advantage. */
2404
2405 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2406 enum machine_mode mode;
2407
2408 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2409 mode = GET_MODE_WIDER_MODE (mode))
2410 {
2411 enum insn_code code = clrstr_optab[(int) mode];
2412 insn_operand_predicate_fn pred;
2413
2414 if (code != CODE_FOR_nothing
2415 /* We don't need MODE to be narrower than
2416 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2417 the mode mask, as it is returned by the macro, it will
2418 definitely be less than the actual mode mask. */
2419 && ((GET_CODE (size) == CONST_INT
2420 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2421 <= (GET_MODE_MASK (mode) >> 1)))
2422 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2423 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2424 || (*pred) (object, BLKmode))
2425 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2426 || (*pred) (opalign, VOIDmode)))
2427 {
2428 rtx op1;
2429 rtx last = get_last_insn ();
2430 rtx pat;
2431
2432 op1 = convert_to_mode (mode, size, 1);
2433 pred = insn_data[(int) code].operand[1].predicate;
2434 if (pred != 0 && ! (*pred) (op1, mode))
2435 op1 = copy_to_mode_reg (mode, op1);
2436
2437 pat = GEN_FCN ((int) code) (object, op1, opalign);
2438 if (pat)
2439 {
2440 emit_insn (pat);
2441 return 0;
2442 }
2443 else
2444 delete_insns_since (last);
2445 }
2446 }
2447
2448 /* OBJECT or SIZE may have been passed through protect_from_queue.
2449
2450 It is unsafe to save the value generated by protect_from_queue
2451 and reuse it later. Consider what happens if emit_queue is
2452 called before the return value from protect_from_queue is used.
2453
2454 Expansion of the CALL_EXPR below will call emit_queue before
2455 we are finished emitting RTL for argument setup. So if we are
2456 not careful we could get the wrong value for an argument.
2457
2458 To avoid this problem we go ahead and emit code to copy OBJECT
2459 and SIZE into new pseudos. We can then place those new pseudos
2460 into an RTL_EXPR and use them later, even after a call to
2461 emit_queue.
2462
2463 Note this is not strictly needed for library calls since they
2464 do not call emit_queue before loading their arguments. However,
2465 we may need to have library calls call emit_queue in the future
2466 since failing to do so could cause problems for targets which
2467 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2468 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2469
2470 #ifdef TARGET_MEM_FUNCTIONS
2471 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2472 #else
2473 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2474 TREE_UNSIGNED (integer_type_node));
2475 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2476 #endif
2477
2478 #ifdef TARGET_MEM_FUNCTIONS
2479 /* It is incorrect to use the libcall calling conventions to call
2480 memset in this context.
2481
2482 This could be a user call to memset and the user may wish to
2483 examine the return value from memset.
2484
2485 For targets where libcalls and normal calls have different
2486 conventions for returning pointers, we could end up generating
2487 incorrect code.
2488
2489 So instead of using a libcall sequence we build up a suitable
2490 CALL_EXPR and expand the call in the normal fashion. */
2491 if (fn == NULL_TREE)
2492 {
2493 tree fntype;
2494
2495 /* This was copied from except.c, I don't know if all this is
2496 necessary in this context or not. */
2497 fn = get_identifier ("memset");
2498 push_obstacks_nochange ();
2499 end_temporary_allocation ();
2500 fntype = build_pointer_type (void_type_node);
2501 fntype = build_function_type (fntype, NULL_TREE);
2502 fn = build_decl (FUNCTION_DECL, fn, fntype);
2503 ggc_add_tree_root (&fn, 1);
2504 DECL_EXTERNAL (fn) = 1;
2505 TREE_PUBLIC (fn) = 1;
2506 DECL_ARTIFICIAL (fn) = 1;
2507 make_decl_rtl (fn, NULL_PTR, 1);
2508 assemble_external (fn);
2509 pop_obstacks ();
2510 }
2511
2512 /* We need to make an argument list for the function call.
2513
2514 memset has three arguments, the first is a void * address, the
2515 second an integer with the initialization value, the last is a
2516 size_t byte count for the copy. */
2517 arg_list
2518 = build_tree_list (NULL_TREE,
2519 make_tree (build_pointer_type (void_type_node),
2520 object));
2521 TREE_CHAIN (arg_list)
2522 = build_tree_list (NULL_TREE,
2523 make_tree (integer_type_node, const0_rtx));
2524 TREE_CHAIN (TREE_CHAIN (arg_list))
2525 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2526 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2527
2528 /* Now we have to build up the CALL_EXPR itself. */
2529 call_expr = build1 (ADDR_EXPR,
2530 build_pointer_type (TREE_TYPE (fn)), fn);
2531 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2532 call_expr, arg_list, NULL_TREE);
2533 TREE_SIDE_EFFECTS (call_expr) = 1;
2534
2535 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2536 #else
2537 emit_library_call (bzero_libfunc, 0,
2538 VOIDmode, 2, object, Pmode, size,
2539 TYPE_MODE (integer_type_node));
2540 #endif
2541 }
2542 }
2543 else
2544 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2545
2546 return retval;
2547 }
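
/* Illustrative usage sketch (added commentary, not original GCC code):
   zeroing a 16-byte BLKmode buffer BUF whose alignment is known to be
   one word could be written

       clear_storage (buf, GEN_INT (16), BITS_PER_WORD);

   BUF and the constants are invented for the example.  A small constant
   size like this is normally handled by clear_by_pieces; larger or
   variable sizes fall back to a clrstr pattern or to memset/bzero.  */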
2548
2549 /* Generate code to copy Y into X.
2550 Both Y and X must have the same mode, except that
2551 Y can be a constant with VOIDmode.
2552 This mode cannot be BLKmode; use emit_block_move for that.
2553
2554 Return the last instruction emitted. */
2555
2556 rtx
2557 emit_move_insn (x, y)
2558 rtx x, y;
2559 {
2560 enum machine_mode mode = GET_MODE (x);
2561
2562 x = protect_from_queue (x, 1);
2563 y = protect_from_queue (y, 0);
2564
2565 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2566 abort ();
2567
2568 /* Never force constant_p_rtx to memory. */
2569 if (GET_CODE (y) == CONSTANT_P_RTX)
2570 ;
2571 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2572 y = force_const_mem (mode, y);
2573
2574 /* If X or Y are memory references, verify that their addresses are valid
2575 for the machine. */
2576 if (GET_CODE (x) == MEM
2577 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2578 && ! push_operand (x, GET_MODE (x)))
2579 || (flag_force_addr
2580 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2581 x = change_address (x, VOIDmode, XEXP (x, 0));
2582
2583 if (GET_CODE (y) == MEM
2584 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2585 || (flag_force_addr
2586 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2587 y = change_address (y, VOIDmode, XEXP (y, 0));
2588
2589 if (mode == BLKmode)
2590 abort ();
2591
2592 return emit_move_insn_1 (x, y);
2593 }
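
/* Illustrative usage sketch (added commentary, not original GCC code):
   copying the constant 42 into a fresh SImode pseudo is simply

       rtx tmp = gen_reg_rtx (SImode);
       emit_move_insn (tmp, GEN_INT (42));

   Constants that are not legitimate for the target are first forced into
   the constant pool, and invalid memory addresses are fixed up, before
   emit_move_insn_1 does the actual emission.  */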
2594
2595 /* Low level part of emit_move_insn.
2596 Called just like emit_move_insn, but assumes X and Y
2597 are basically valid. */
2598
2599 rtx
2600 emit_move_insn_1 (x, y)
2601 rtx x, y;
2602 {
2603 enum machine_mode mode = GET_MODE (x);
2604 enum machine_mode submode;
2605 enum mode_class class = GET_MODE_CLASS (mode);
2606 unsigned int i;
2607
2608 if (mode >= MAX_MACHINE_MODE)
2609 abort ();
2610
2611 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2612 return
2613 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2614
2615 /* Expand complex moves by moving real part and imag part, if possible. */
2616 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2617 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2618 * BITS_PER_UNIT),
2619 (class == MODE_COMPLEX_INT
2620 ? MODE_INT : MODE_FLOAT),
2621 0))
2622 && (mov_optab->handlers[(int) submode].insn_code
2623 != CODE_FOR_nothing))
2624 {
2625 /* Don't split destination if it is a stack push. */
2626 int stack = push_operand (x, GET_MODE (x));
2627
2628 /* If this is a stack, push the highpart first, so it
2629 will be in the argument order.
2630
2631 In that case, change_address is used only to convert
2632 the mode, not to change the address. */
2633 if (stack)
2634 {
2635 /* Note that the real part always precedes the imag part in memory
2636 regardless of machine's endianness. */
2637 #ifdef STACK_GROWS_DOWNWARD
2638 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2639 (gen_rtx_MEM (submode, XEXP (x, 0)),
2640 gen_imagpart (submode, y)));
2641 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2642 (gen_rtx_MEM (submode, XEXP (x, 0)),
2643 gen_realpart (submode, y)));
2644 #else
2645 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2646 (gen_rtx_MEM (submode, XEXP (x, 0)),
2647 gen_realpart (submode, y)));
2648 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2649 (gen_rtx_MEM (submode, XEXP (x, 0)),
2650 gen_imagpart (submode, y)));
2651 #endif
2652 }
2653 else
2654 {
2655 rtx realpart_x, realpart_y;
2656 rtx imagpart_x, imagpart_y;
2657
2658 /* If this is a complex value with each part being smaller than a
2659 word, the usual calling sequence will likely pack the pieces into
2660 a single register. Unfortunately, SUBREG of hard registers only
2661 deals in terms of words, so we have a problem converting input
2662 arguments to the CONCAT of two registers that is used elsewhere
2663 for complex values. If this is before reload, we can copy it into
2664 memory and reload. FIXME, we should see about using extract and
2665 insert on integer registers, but complex short and complex char
2666 variables should be rarely used. */
2667 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2668 && (reload_in_progress | reload_completed) == 0)
2669 {
2670 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2671 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2672
2673 if (packed_dest_p || packed_src_p)
2674 {
2675 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2676 ? MODE_FLOAT : MODE_INT);
2677
2678 enum machine_mode reg_mode =
2679 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2680
2681 if (reg_mode != BLKmode)
2682 {
2683 rtx mem = assign_stack_temp (reg_mode,
2684 GET_MODE_SIZE (mode), 0);
2685
2686 rtx cmem = change_address (mem, mode, NULL_RTX);
2687
2688 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2689
2690 if (packed_dest_p)
2691 {
2692 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2693 emit_move_insn_1 (cmem, y);
2694 return emit_move_insn_1 (sreg, mem);
2695 }
2696 else
2697 {
2698 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2699 emit_move_insn_1 (mem, sreg);
2700 return emit_move_insn_1 (x, cmem);
2701 }
2702 }
2703 }
2704 }
2705
2706 realpart_x = gen_realpart (submode, x);
2707 realpart_y = gen_realpart (submode, y);
2708 imagpart_x = gen_imagpart (submode, x);
2709 imagpart_y = gen_imagpart (submode, y);
2710
2711 /* Show the output dies here. This is necessary for SUBREGs
2712 of pseudos since we cannot track their lifetimes correctly;
2713 hard regs shouldn't appear here except as return values.
2714 We never want to emit such a clobber after reload. */
2715 if (x != y
2716 && ! (reload_in_progress || reload_completed)
2717 && (GET_CODE (realpart_x) == SUBREG
2718 || GET_CODE (imagpart_x) == SUBREG))
2719 {
2720 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2721 }
2722
2723 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2724 (realpart_x, realpart_y));
2725 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2726 (imagpart_x, imagpart_y));
2727 }
2728
2729 return get_last_insn ();
2730 }
2731
2732 /* This will handle any multi-word mode that lacks a move_insn pattern.
2733 However, you will get better code if you define such patterns,
2734 even if they must turn into multiple assembler instructions. */
2735 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2736 {
2737 rtx last_insn = 0;
2738 rtx seq, inner;
2739 int need_clobber;
2740
2741 #ifdef PUSH_ROUNDING
2742
2743 /* If X is a push on the stack, do the push now and replace
2744 X with a reference to the stack pointer. */
2745 if (push_operand (x, GET_MODE (x)))
2746 {
2747 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2748 x = change_address (x, VOIDmode, stack_pointer_rtx);
2749 }
2750 #endif
2751
2752 /* If we are in reload, see if either operand is a MEM whose address
2753 is scheduled for replacement. */
2754 if (reload_in_progress && GET_CODE (x) == MEM
2755 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2756 {
2757 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2758
2759 MEM_COPY_ATTRIBUTES (new, x);
2760 x = new;
2761 }
2762 if (reload_in_progress && GET_CODE (y) == MEM
2763 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2764 {
2765 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2766
2767 MEM_COPY_ATTRIBUTES (new, y);
2768 y = new;
2769 }
2770
2771 start_sequence ();
2772
2773 need_clobber = 0;
2774 for (i = 0;
2775 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2776 i++)
2777 {
2778 rtx xpart = operand_subword (x, i, 1, mode);
2779 rtx ypart = operand_subword (y, i, 1, mode);
2780
2781 /* If we can't get a part of Y, put Y into memory if it is a
2782 constant. Otherwise, force it into a register. If we still
2783 can't get a part of Y, abort. */
2784 if (ypart == 0 && CONSTANT_P (y))
2785 {
2786 y = force_const_mem (mode, y);
2787 ypart = operand_subword (y, i, 1, mode);
2788 }
2789 else if (ypart == 0)
2790 ypart = operand_subword_force (y, i, mode);
2791
2792 if (xpart == 0 || ypart == 0)
2793 abort ();
2794
2795 need_clobber |= (GET_CODE (xpart) == SUBREG);
2796
2797 last_insn = emit_move_insn (xpart, ypart);
2798 }
2799
2800 seq = gen_sequence ();
2801 end_sequence ();
2802
2803 /* Show the output dies here. This is necessary for SUBREGs
2804 of pseudos since we cannot track their lifetimes correctly;
2805 hard regs shouldn't appear here except as return values.
2806 We never want to emit such a clobber after reload. */
2807 if (x != y
2808 && ! (reload_in_progress || reload_completed)
2809 && need_clobber != 0)
2810 {
2811 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2812 }
2813
2814 emit_insn (seq);
2815
2816 return last_insn;
2817 }
2818 else
2819 abort ();
2820 }
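
/* Worked example of the multi-word fallback above (added commentary, not
   original GCC code): moving a DImode value on a 32-bit target that has
   no DImode mov pattern loops

       (GET_MODE_SIZE (DImode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD  =  2

   times, pairing each operand_subword of X with the matching subword of
   Y, and precedes the sequence with a CLOBBER of X if any destination
   word is a SUBREG and reload has not started.  */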
2821 \f
2822 /* Pushing data onto the stack. */
2823
2824 /* Push a block of length SIZE (perhaps variable)
2825 and return an rtx to address the beginning of the block.
2826 Note that it is not possible for the value returned to be a QUEUED.
2827 The value may be virtual_outgoing_args_rtx.
2828
2829 EXTRA is the number of bytes of padding to push in addition to SIZE.
2830 BELOW nonzero means this padding comes at low addresses;
2831 otherwise, the padding comes at high addresses. */
2832
2833 rtx
2834 push_block (size, extra, below)
2835 rtx size;
2836 int extra, below;
2837 {
2838 register rtx temp;
2839
2840 size = convert_modes (Pmode, ptr_mode, size, 1);
2841 if (CONSTANT_P (size))
2842 anti_adjust_stack (plus_constant (size, extra));
2843 else if (GET_CODE (size) == REG && extra == 0)
2844 anti_adjust_stack (size);
2845 else
2846 {
2847 temp = copy_to_mode_reg (Pmode, size);
2848 if (extra != 0)
2849 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2850 temp, 0, OPTAB_LIB_WIDEN);
2851 anti_adjust_stack (temp);
2852 }
2853
2854 #ifndef STACK_GROWS_DOWNWARD
2855 #ifdef ARGS_GROW_DOWNWARD
2856 if (!ACCUMULATE_OUTGOING_ARGS)
2857 #else
2858 if (0)
2859 #endif
2860 #else
2861 if (1)
2862 #endif
2863 {
2864 /* Return the lowest stack address when STACK or ARGS grow downward and
2865 we are not accumulating outgoing arguments (the c4x port uses such
2866 conventions). */
2867 temp = virtual_outgoing_args_rtx;
2868 if (extra != 0 && below)
2869 temp = plus_constant (temp, extra);
2870 }
2871 else
2872 {
2873 if (GET_CODE (size) == CONST_INT)
2874 temp = plus_constant (virtual_outgoing_args_rtx,
2875 -INTVAL (size) - (below ? 0 : extra));
2876 else if (extra != 0 && !below)
2877 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2878 negate_rtx (Pmode, plus_constant (size, extra)));
2879 else
2880 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2881 negate_rtx (Pmode, size));
2882 }
2883
2884 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2885 }
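
/* Worked example (added commentary, not original GCC code): the call

       push_block (GEN_INT (32), 0, 0)

   on a target whose stack grows downward emits anti_adjust_stack for the
   32 bytes and then returns virtual_outgoing_args_rtx, which after the
   adjustment addresses the lowest byte of the newly allocated block.  */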
2886
2887 rtx
2888 gen_push_operand ()
2889 {
2890 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2891 }
2892
2893 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2894 block of SIZE bytes. */
2895
2896 static rtx
2897 get_push_address (size)
2898 int size;
2899 {
2900 register rtx temp;
2901
2902 if (STACK_PUSH_CODE == POST_DEC)
2903 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2904 else if (STACK_PUSH_CODE == POST_INC)
2905 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2906 else
2907 temp = stack_pointer_rtx;
2908
2909 return copy_to_reg (temp);
2910 }
2911
2912 /* Generate code to push X onto the stack, assuming it has mode MODE and
2913 type TYPE.
2914 MODE is redundant except when X is a CONST_INT (since they don't
2915 carry mode info).
2916 SIZE is an rtx for the size of data to be copied (in bytes),
2917 needed only if X is BLKmode.
2918
2919 ALIGN is maximum alignment we can assume.
2920
2921 If PARTIAL and REG are both nonzero, then copy that many of the first
2922 words of X into registers starting with REG, and push the rest of X.
2923 The amount of space pushed is decreased by PARTIAL words,
2924 rounded *down* to a multiple of PARM_BOUNDARY.
2925 REG must be a hard register in this case.
2926 If REG is zero but PARTIAL is not, take all other actions for an
2927 argument partially in registers, but do not actually load any
2928 registers.
2929
2930 EXTRA is the amount in bytes of extra space to leave next to this arg.
2931 This is ignored if an argument block has already been allocated.
2932
2933 On a machine that lacks real push insns, ARGS_ADDR is the address of
2934 the bottom of the argument block for this call. We use indexing off there
2935 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2936 argument block has not been preallocated.
2937
2938 ARGS_SO_FAR is the size of args previously pushed for this call.
2939
2940 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2941 for arguments passed in registers. If nonzero, it will be the number
2942 of bytes required. */
2943
2944 void
2945 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2946 args_addr, args_so_far, reg_parm_stack_space,
2947 alignment_pad)
2948 register rtx x;
2949 enum machine_mode mode;
2950 tree type;
2951 rtx size;
2952 unsigned int align;
2953 int partial;
2954 rtx reg;
2955 int extra;
2956 rtx args_addr;
2957 rtx args_so_far;
2958 int reg_parm_stack_space;
2959 rtx alignment_pad;
2960 {
2961 rtx xinner;
2962 enum direction stack_direction
2963 #ifdef STACK_GROWS_DOWNWARD
2964 = downward;
2965 #else
2966 = upward;
2967 #endif
2968
2969 /* Decide where to pad the argument: `downward' for below,
2970 `upward' for above, or `none' for don't pad it.
2971 Default is below for small data on big-endian machines; else above. */
2972 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2973
2974 /* Invert direction if stack is post-update. */
2975 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2976 if (where_pad != none)
2977 where_pad = (where_pad == downward ? upward : downward);
2978
2979 xinner = x = protect_from_queue (x, 0);
2980
2981 if (mode == BLKmode)
2982 {
2983 /* Copy a block into the stack, entirely or partially. */
2984
2985 register rtx temp;
2986 int used = partial * UNITS_PER_WORD;
2987 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2988 int skip;
2989
2990 if (size == 0)
2991 abort ();
2992
2993 used -= offset;
2994
2995 /* USED is now the # of bytes we need not copy to the stack
2996 because registers will take care of them. */
2997
2998 if (partial != 0)
2999 xinner = change_address (xinner, BLKmode,
3000 plus_constant (XEXP (xinner, 0), used));
3001
3002 /* If the partial register-part of the arg counts in its stack size,
3003 skip the part of stack space corresponding to the registers.
3004 Otherwise, start copying to the beginning of the stack space,
3005 by setting SKIP to 0. */
3006 skip = (reg_parm_stack_space == 0) ? 0 : used;
3007
3008 #ifdef PUSH_ROUNDING
3009 /* Do it with several push insns if that doesn't take lots of insns
3010 and if there is no difficulty with push insns that skip bytes
3011 on the stack for alignment purposes. */
3012 if (args_addr == 0
3013 && PUSH_ARGS
3014 && GET_CODE (size) == CONST_INT
3015 && skip == 0
3016 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3017 /* Here we avoid the case of a structure whose weak alignment
3018 forces many pushes of a small amount of data,
3019 and such small pushes do rounding that causes trouble. */
3020 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3021 || align >= BIGGEST_ALIGNMENT
3022 || PUSH_ROUNDING (align) == align)
3023 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3024 {
3025 /* Push padding now if padding above and stack grows down,
3026 or if padding below and stack grows up.
3027 But if space already allocated, this has already been done. */
3028 if (extra && args_addr == 0
3029 && where_pad != none && where_pad != stack_direction)
3030 anti_adjust_stack (GEN_INT (extra));
3031
3032 stack_pointer_delta += INTVAL (size) - used;
3033 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3034 INTVAL (size) - used, align);
3035
3036 if (current_function_check_memory_usage && ! in_check_memory_usage)
3037 {
3038 rtx temp;
3039
3040 in_check_memory_usage = 1;
3041 temp = get_push_address (INTVAL (size) - used);
3042 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3043 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3044 temp, Pmode,
3045 XEXP (xinner, 0), Pmode,
3046 GEN_INT (INTVAL (size) - used),
3047 TYPE_MODE (sizetype));
3048 else
3049 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3050 temp, Pmode,
3051 GEN_INT (INTVAL (size) - used),
3052 TYPE_MODE (sizetype),
3053 GEN_INT (MEMORY_USE_RW),
3054 TYPE_MODE (integer_type_node));
3055 in_check_memory_usage = 0;
3056 }
3057 }
3058 else
3059 #endif /* PUSH_ROUNDING */
3060 {
3061 rtx target;
3062
3063 /* Otherwise make space on the stack and copy the data
3064 to the address of that space. */
3065
3066 /* Deduct words put into registers from the size we must copy. */
3067 if (partial != 0)
3068 {
3069 if (GET_CODE (size) == CONST_INT)
3070 size = GEN_INT (INTVAL (size) - used);
3071 else
3072 size = expand_binop (GET_MODE (size), sub_optab, size,
3073 GEN_INT (used), NULL_RTX, 0,
3074 OPTAB_LIB_WIDEN);
3075 }
3076
3077 /* Get the address of the stack space.
3078 In this case, we do not deal with EXTRA separately.
3079 A single stack adjust will do. */
3080 if (! args_addr)
3081 {
3082 temp = push_block (size, extra, where_pad == downward);
3083 extra = 0;
3084 }
3085 else if (GET_CODE (args_so_far) == CONST_INT)
3086 temp = memory_address (BLKmode,
3087 plus_constant (args_addr,
3088 skip + INTVAL (args_so_far)));
3089 else
3090 temp = memory_address (BLKmode,
3091 plus_constant (gen_rtx_PLUS (Pmode,
3092 args_addr,
3093 args_so_far),
3094 skip));
3095 if (current_function_check_memory_usage && ! in_check_memory_usage)
3096 {
3097 in_check_memory_usage = 1;
3098 target = copy_to_reg (temp);
3099 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3100 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3101 target, Pmode,
3102 XEXP (xinner, 0), Pmode,
3103 size, TYPE_MODE (sizetype));
3104 else
3105 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3106 target, Pmode,
3107 size, TYPE_MODE (sizetype),
3108 GEN_INT (MEMORY_USE_RW),
3109 TYPE_MODE (integer_type_node));
3110 in_check_memory_usage = 0;
3111 }
3112
3113 target = gen_rtx_MEM (BLKmode, temp);
3114
3115 if (type != 0)
3116 {
3117 set_mem_attributes (target, type, 1);
3118 /* Function incoming arguments may overlap with sibling call
3119 outgoing arguments and we cannot allow reordering of reads
3120 from function arguments with stores to outgoing arguments
3121 of sibling calls. */
3122 MEM_ALIAS_SET (target) = 0;
3123 }
3124
3125 /* TEMP is the address of the block. Copy the data there. */
3126 if (GET_CODE (size) == CONST_INT
3127 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3128 {
3129 move_by_pieces (target, xinner, INTVAL (size), align);
3130 goto ret;
3131 }
3132 else
3133 {
3134 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3135 enum machine_mode mode;
3136
3137 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3138 mode != VOIDmode;
3139 mode = GET_MODE_WIDER_MODE (mode))
3140 {
3141 enum insn_code code = movstr_optab[(int) mode];
3142 insn_operand_predicate_fn pred;
3143
3144 if (code != CODE_FOR_nothing
3145 && ((GET_CODE (size) == CONST_INT
3146 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3147 <= (GET_MODE_MASK (mode) >> 1)))
3148 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3149 && (!(pred = insn_data[(int) code].operand[0].predicate)
3150 || ((*pred) (target, BLKmode)))
3151 && (!(pred = insn_data[(int) code].operand[1].predicate)
3152 || ((*pred) (xinner, BLKmode)))
3153 && (!(pred = insn_data[(int) code].operand[3].predicate)
3154 || ((*pred) (opalign, VOIDmode))))
3155 {
3156 rtx op2 = convert_to_mode (mode, size, 1);
3157 rtx last = get_last_insn ();
3158 rtx pat;
3159
3160 pred = insn_data[(int) code].operand[2].predicate;
3161 if (pred != 0 && ! (*pred) (op2, mode))
3162 op2 = copy_to_mode_reg (mode, op2);
3163
3164 pat = GEN_FCN ((int) code) (target, xinner,
3165 op2, opalign);
3166 if (pat)
3167 {
3168 emit_insn (pat);
3169 goto ret;
3170 }
3171 else
3172 delete_insns_since (last);
3173 }
3174 }
3175 }
3176
3177 if (!ACCUMULATE_OUTGOING_ARGS)
3178 {
3179 /* If the source is referenced relative to the stack pointer,
3180 copy it to another register to stabilize it. We do not need
3181 to do this if we know that we won't be changing sp. */
3182
3183 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3184 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3185 temp = copy_to_reg (temp);
3186 }
3187
3188 /* Make inhibit_defer_pop nonzero around the library call
3189 to force it to pop the bcopy-arguments right away. */
3190 NO_DEFER_POP;
3191 #ifdef TARGET_MEM_FUNCTIONS
3192 emit_library_call (memcpy_libfunc, 0,
3193 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3194 convert_to_mode (TYPE_MODE (sizetype),
3195 size, TREE_UNSIGNED (sizetype)),
3196 TYPE_MODE (sizetype));
3197 #else
3198 emit_library_call (bcopy_libfunc, 0,
3199 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3200 convert_to_mode (TYPE_MODE (integer_type_node),
3201 size,
3202 TREE_UNSIGNED (integer_type_node)),
3203 TYPE_MODE (integer_type_node));
3204 #endif
3205 OK_DEFER_POP;
3206 }
3207 }
3208 else if (partial > 0)
3209 {
3210 /* Scalar partly in registers. */
3211
3212 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3213 int i;
3214 int not_stack;
3215 /* # words of start of argument
3216 that we must make space for but need not store. */
3217 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3218 int args_offset = INTVAL (args_so_far);
3219 int skip;
3220
3221 /* Push padding now if padding above and stack grows down,
3222 or if padding below and stack grows up.
3223 But if space already allocated, this has already been done. */
3224 if (extra && args_addr == 0
3225 && where_pad != none && where_pad != stack_direction)
3226 anti_adjust_stack (GEN_INT (extra));
3227
3228 /* If we make space by pushing it, we might as well push
3229 the real data. Otherwise, we can leave OFFSET nonzero
3230 and leave the space uninitialized. */
3231 if (args_addr == 0)
3232 offset = 0;
3233
3234 /* Now NOT_STACK gets the number of words that we don't need to
3235 allocate on the stack. */
3236 not_stack = partial - offset;
3237
3238 /* If the partial register-part of the arg counts in its stack size,
3239 skip the part of stack space corresponding to the registers.
3240 Otherwise, start copying to the beginning of the stack space,
3241 by setting SKIP to 0. */
3242 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3243
3244 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3245 x = validize_mem (force_const_mem (mode, x));
3246
3247 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3248 SUBREGs of such registers are not allowed. */
3249 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3250 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3251 x = copy_to_reg (x);
3252
3253 /* Loop over all the words allocated on the stack for this arg. */
3254 /* We can do it by words, because any scalar bigger than a word
3255 has a size a multiple of a word. */
3256 #ifndef PUSH_ARGS_REVERSED
3257 for (i = not_stack; i < size; i++)
3258 #else
3259 for (i = size - 1; i >= not_stack; i--)
3260 #endif
3261 if (i >= not_stack + offset)
3262 emit_push_insn (operand_subword_force (x, i, mode),
3263 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3264 0, args_addr,
3265 GEN_INT (args_offset + ((i - not_stack + skip)
3266 * UNITS_PER_WORD)),
3267 reg_parm_stack_space, alignment_pad);
3268 }
3269 else
3270 {
3271 rtx addr;
3272 rtx target = NULL_RTX;
3273 rtx dest;
3274
3275 /* Push padding now if padding above and stack grows down,
3276 or if padding below and stack grows up.
3277 But if space already allocated, this has already been done. */
3278 if (extra && args_addr == 0
3279 && where_pad != none && where_pad != stack_direction)
3280 anti_adjust_stack (GEN_INT (extra));
3281
3282 #ifdef PUSH_ROUNDING
3283 if (args_addr == 0 && PUSH_ARGS)
3284 {
3285 addr = gen_push_operand ();
3286 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3287 }
3288 else
3289 #endif
3290 {
3291 if (GET_CODE (args_so_far) == CONST_INT)
3292 addr
3293 = memory_address (mode,
3294 plus_constant (args_addr,
3295 INTVAL (args_so_far)));
3296 else
3297 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3298 args_so_far));
3299 target = addr;
3300 }
3301
3302 dest = gen_rtx_MEM (mode, addr);
3303 if (type != 0)
3304 {
3305 set_mem_attributes (dest, type, 1);
3306 /* Function incoming arguments may overlap with sibling call
3307 outgoing arguments and we cannot allow reordering of reads
3308 from function arguments with stores to outgoing arguments
3309 of sibling calls. */
3310 MEM_ALIAS_SET (dest) = 0;
3311 }
3312
3313 emit_move_insn (dest, x);
3314
3315 if (current_function_check_memory_usage && ! in_check_memory_usage)
3316 {
3317 in_check_memory_usage = 1;
3318 if (target == 0)
3319 target = get_push_address (GET_MODE_SIZE (mode));
3320
3321 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3322 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3323 target, Pmode,
3324 XEXP (x, 0), Pmode,
3325 GEN_INT (GET_MODE_SIZE (mode)),
3326 TYPE_MODE (sizetype));
3327 else
3328 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3329 target, Pmode,
3330 GEN_INT (GET_MODE_SIZE (mode)),
3331 TYPE_MODE (sizetype),
3332 GEN_INT (MEMORY_USE_RW),
3333 TYPE_MODE (integer_type_node));
3334 in_check_memory_usage = 0;
3335 }
3336 }
3337
3338 ret:
3339 /* If part should go in registers, copy that part
3340 into the appropriate registers. Do this now, at the end,
3341 since mem-to-mem copies above may do function calls. */
3342 if (partial > 0 && reg != 0)
3343 {
3344 /* Handle calls that pass values in multiple non-contiguous locations.
3345 The Irix 6 ABI has examples of this. */
3346 if (GET_CODE (reg) == PARALLEL)
3347 emit_group_load (reg, x, -1, align); /* ??? size? */
3348 else
3349 move_block_to_reg (REGNO (reg), x, partial, mode);
3350 }
3351
3352 if (extra && args_addr == 0 && where_pad == stack_direction)
3353 anti_adjust_stack (GEN_INT (extra));
3354
3355 if (alignment_pad && args_addr == 0)
3356 anti_adjust_stack (alignment_pad);
3357 }
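
/* Worked example of the partial-argument bookkeeping above (added
   commentary, not original GCC code): for a BLKmode argument with
   PARTIAL == 3 words on a 32-bit target with PARM_BOUNDARY == 64,

       used   = 3 * UNITS_PER_WORD           = 12 bytes
       offset = used % (PARM_BOUNDARY / 8)   = 12 % 8 = 4 bytes
       used  -= offset                       =  8 bytes

   so only 8 of the 12 register bytes are skipped when copying to the
   stack; the remaining 4 are copied anyway, keeping the stack portion
   aligned to the parameter boundary.  */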
3358 \f
3359 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3360 operations. */
3361
3362 static rtx
3363 get_subtarget (x)
3364 rtx x;
3365 {
3366 return ((x == 0
3367 /* Only registers can be subtargets. */
3368 || GET_CODE (x) != REG
3369 /* If the register is readonly, it can't be set more than once. */
3370 || RTX_UNCHANGING_P (x)
3371 /* Don't use hard regs to avoid extending their life. */
3372 || REGNO (x) < FIRST_PSEUDO_REGISTER
3373 /* Avoid subtargets inside loops,
3374 since they hide some invariant expressions. */
3375 || preserve_subexpressions_p ())
3376 ? 0 : x);
3377 }
3378
3379 /* Expand an assignment that stores the value of FROM into TO.
3380 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3381 (This may contain a QUEUED rtx;
3382 if the value is constant, this rtx is a constant.)
3383 Otherwise, the returned value is NULL_RTX.
3384
3385 SUGGEST_REG is no longer actually used.
3386 It used to mean, copy the value through a register
3387 and return that register, if that is possible.
3388 We now use WANT_VALUE to decide whether to do this. */
3389
3390 rtx
3391 expand_assignment (to, from, want_value, suggest_reg)
3392 tree to, from;
3393 int want_value;
3394 int suggest_reg ATTRIBUTE_UNUSED;
3395 {
3396 register rtx to_rtx = 0;
3397 rtx result;
3398
3399 /* Don't crash if the lhs of the assignment was erroneous. */
3400
3401 if (TREE_CODE (to) == ERROR_MARK)
3402 {
3403 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3404 return want_value ? result : NULL_RTX;
3405 }
3406
3407 /* Assignment of a structure component needs special treatment
3408 if the structure component's rtx is not simply a MEM.
3409 Assignment of an array element at a constant index, and assignment of
3410 an array element in an unaligned packed structure field, have the same
3411 problem. */
3412
3413 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3414 || TREE_CODE (to) == ARRAY_REF)
3415 {
3416 enum machine_mode mode1;
3417 HOST_WIDE_INT bitsize, bitpos;
3418 tree offset;
3419 int unsignedp;
3420 int volatilep = 0;
3421 tree tem;
3422 unsigned int alignment;
3423
3424 push_temp_slots ();
3425 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3426 &unsignedp, &volatilep, &alignment);
3427
3428 /* If we are going to use store_bit_field and extract_bit_field,
3429 make sure to_rtx will be safe for multiple use. */
3430
3431 if (mode1 == VOIDmode && want_value)
3432 tem = stabilize_reference (tem);
3433
3434 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3435 if (offset != 0)
3436 {
3437 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3438
3439 if (GET_CODE (to_rtx) != MEM)
3440 abort ();
3441
3442 if (GET_MODE (offset_rtx) != ptr_mode)
3443 {
3444 #ifdef POINTERS_EXTEND_UNSIGNED
3445 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3446 #else
3447 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3448 #endif
3449 }
3450
3451 /* A constant address in TO_RTX can have VOIDmode, we must not try
3452 to call force_reg for that case. Avoid that case. */
3453 if (GET_CODE (to_rtx) == MEM
3454 && GET_MODE (to_rtx) == BLKmode
3455 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3456 && bitsize
3457 && (bitpos % bitsize) == 0
3458 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3459 && alignment == GET_MODE_ALIGNMENT (mode1))
3460 {
3461 rtx temp = change_address (to_rtx, mode1,
3462 plus_constant (XEXP (to_rtx, 0),
3463 (bitpos /
3464 BITS_PER_UNIT)));
3465 if (GET_CODE (XEXP (temp, 0)) == REG)
3466 to_rtx = temp;
3467 else
3468 to_rtx = change_address (to_rtx, mode1,
3469 force_reg (GET_MODE (XEXP (temp, 0)),
3470 XEXP (temp, 0)));
3471 bitpos = 0;
3472 }
3473
3474 to_rtx = change_address (to_rtx, VOIDmode,
3475 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3476 force_reg (ptr_mode,
3477 offset_rtx)));
3478 }
3479
3480 if (volatilep)
3481 {
3482 if (GET_CODE (to_rtx) == MEM)
3483 {
3484 /* When the offset is zero, to_rtx is the address of the
3485 structure we are storing into, and hence may be shared.
3486 We must make a new MEM before setting the volatile bit. */
3487 if (offset == 0)
3488 to_rtx = copy_rtx (to_rtx);
3489
3490 MEM_VOLATILE_P (to_rtx) = 1;
3491 }
3492 #if 0 /* This was turned off because, when a field is volatile
3493 in an object which is not volatile, the object may be in a register,
3494 and then we would abort over here. */
3495 else
3496 abort ();
3497 #endif
3498 }
3499
3500 if (TREE_CODE (to) == COMPONENT_REF
3501 && TREE_READONLY (TREE_OPERAND (to, 1)))
3502 {
3503 if (offset == 0)
3504 to_rtx = copy_rtx (to_rtx);
3505
3506 RTX_UNCHANGING_P (to_rtx) = 1;
3507 }
3508
3509 /* Check the access. */
3510 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3511 {
3512 rtx to_addr;
3513 int size;
3514 int best_mode_size;
3515 enum machine_mode best_mode;
3516
3517 best_mode = get_best_mode (bitsize, bitpos,
3518 TYPE_ALIGN (TREE_TYPE (tem)),
3519 mode1, volatilep);
3520 if (best_mode == VOIDmode)
3521 best_mode = QImode;
3522
3523 best_mode_size = GET_MODE_BITSIZE (best_mode);
3524 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3525 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3526 size *= GET_MODE_SIZE (best_mode);
3527
3528 /* Check the access right of the pointer. */
3529 in_check_memory_usage = 1;
3530 if (size)
3531 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3532 to_addr, Pmode,
3533 GEN_INT (size), TYPE_MODE (sizetype),
3534 GEN_INT (MEMORY_USE_WO),
3535 TYPE_MODE (integer_type_node));
3536 in_check_memory_usage = 0;
3537 }
3538
3539 /* If this is a varying-length object, we must get the address of
3540 the source and do an explicit block move. */
3541 if (bitsize < 0)
3542 {
3543 unsigned int from_align;
3544 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3545 rtx inner_to_rtx
3546 = change_address (to_rtx, VOIDmode,
3547 plus_constant (XEXP (to_rtx, 0),
3548 bitpos / BITS_PER_UNIT));
3549
3550 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3551 MIN (alignment, from_align));
3552 free_temp_slots ();
3553 pop_temp_slots ();
3554 return to_rtx;
3555 }
3556 else
3557 {
3558 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3559 (want_value
3560 /* Spurious cast for HPUX compiler. */
3561 ? ((enum machine_mode)
3562 TYPE_MODE (TREE_TYPE (to)))
3563 : VOIDmode),
3564 unsignedp,
3565 alignment,
3566 int_size_in_bytes (TREE_TYPE (tem)),
3567 get_alias_set (to));
3568
3569 preserve_temp_slots (result);
3570 free_temp_slots ();
3571 pop_temp_slots ();
3572
3573 /* If the value is meaningful, convert RESULT to the proper mode.
3574 Otherwise, return nothing. */
3575 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3576 TYPE_MODE (TREE_TYPE (from)),
3577 result,
3578 TREE_UNSIGNED (TREE_TYPE (to)))
3579 : NULL_RTX);
3580 }
3581 }
3582
3583 /* If the rhs is a function call and its value is not an aggregate,
3584 call the function before we start to compute the lhs.
3585 This is needed for correct code for cases such as
3586 val = setjmp (buf) on machines where reference to val
3587 requires loading up part of an address in a separate insn.
3588
3589 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3590 since it might be a promoted variable where the zero- or sign- extension
3591 needs to be done. Handling this in the normal way is safe because no
3592 computation is done before the call. */
3593 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3594 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3595 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3596 && GET_CODE (DECL_RTL (to)) == REG))
3597 {
3598 rtx value;
3599
3600 push_temp_slots ();
3601 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3602 if (to_rtx == 0)
3603 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3604
3605 /* Handle calls that return values in multiple non-contiguous locations.
3606 The Irix 6 ABI has examples of this. */
3607 if (GET_CODE (to_rtx) == PARALLEL)
3608 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3609 TYPE_ALIGN (TREE_TYPE (from)));
3610 else if (GET_MODE (to_rtx) == BLKmode)
3611 emit_block_move (to_rtx, value, expr_size (from),
3612 TYPE_ALIGN (TREE_TYPE (from)));
3613 else
3614 {
3615 #ifdef POINTERS_EXTEND_UNSIGNED
3616 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3617 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3618 value = convert_memory_address (GET_MODE (to_rtx), value);
3619 #endif
3620 emit_move_insn (to_rtx, value);
3621 }
3622 preserve_temp_slots (to_rtx);
3623 free_temp_slots ();
3624 pop_temp_slots ();
3625 return want_value ? to_rtx : NULL_RTX;
3626 }
3627
3628 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3629 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3630
3631 if (to_rtx == 0)
3632 {
3633 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3634 if (GET_CODE (to_rtx) == MEM)
3635 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3636 }
3637
3638 /* Don't move directly into a return register. */
3639 if (TREE_CODE (to) == RESULT_DECL
3640 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3641 {
3642 rtx temp;
3643
3644 push_temp_slots ();
3645 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3646
3647 if (GET_CODE (to_rtx) == PARALLEL)
3648 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3649 TYPE_ALIGN (TREE_TYPE (from)));
3650 else
3651 emit_move_insn (to_rtx, temp);
3652
3653 preserve_temp_slots (to_rtx);
3654 free_temp_slots ();
3655 pop_temp_slots ();
3656 return want_value ? to_rtx : NULL_RTX;
3657 }
3658
3659 /* In case we are returning the contents of an object which overlaps
3660 the place the value is being stored, use a safe function when copying
3661 a value through a pointer into a structure value return block. */
3662 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3663 && current_function_returns_struct
3664 && !current_function_returns_pcc_struct)
3665 {
3666 rtx from_rtx, size;
3667
3668 push_temp_slots ();
3669 size = expr_size (from);
3670 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3671 EXPAND_MEMORY_USE_DONT);
3672
3673 /* Copy the rights of the bitmap. */
3674 if (current_function_check_memory_usage)
3675 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3676 XEXP (to_rtx, 0), Pmode,
3677 XEXP (from_rtx, 0), Pmode,
3678 convert_to_mode (TYPE_MODE (sizetype),
3679 size, TREE_UNSIGNED (sizetype)),
3680 TYPE_MODE (sizetype));
3681
3682 #ifdef TARGET_MEM_FUNCTIONS
3683 emit_library_call (memcpy_libfunc, 0,
3684 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3685 XEXP (from_rtx, 0), Pmode,
3686 convert_to_mode (TYPE_MODE (sizetype),
3687 size, TREE_UNSIGNED (sizetype)),
3688 TYPE_MODE (sizetype));
3689 #else
3690 emit_library_call (bcopy_libfunc, 0,
3691 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3692 XEXP (to_rtx, 0), Pmode,
3693 convert_to_mode (TYPE_MODE (integer_type_node),
3694 size, TREE_UNSIGNED (integer_type_node)),
3695 TYPE_MODE (integer_type_node));
3696 #endif
3697
3698 preserve_temp_slots (to_rtx);
3699 free_temp_slots ();
3700 pop_temp_slots ();
3701 return want_value ? to_rtx : NULL_RTX;
3702 }
3703
3704 /* Compute FROM and store the value in the rtx we got. */
3705
3706 push_temp_slots ();
3707 result = store_expr (from, to_rtx, want_value);
3708 preserve_temp_slots (result);
3709 free_temp_slots ();
3710 pop_temp_slots ();
3711 return want_value ? result : NULL_RTX;
3712 }
3713
3714 /* Generate code for computing expression EXP,
3715 and storing the value into TARGET.
3716 TARGET may contain a QUEUED rtx.
3717
3718 If WANT_VALUE is nonzero, return a copy of the value
3719 not in TARGET, so that we can be sure to use the proper
3720 value in a containing expression even if TARGET has something
3721 else stored in it. If possible, we copy the value through a pseudo
3722 and return that pseudo. Or, if the value is constant, we try to
3723 return the constant. In some cases, we return a pseudo
3724 copied *from* TARGET.
3725
3726 If the mode is BLKmode then we may return TARGET itself.
3727 It turns out that in BLKmode it doesn't cause a problem,
3728 because C has no operators that could combine two different
3729 assignments into the same BLKmode object with different values
3730 with no sequence point. Will other languages need this to
3731 be more thorough?
3732
3733 If WANT_VALUE is 0, we return NULL, to make sure
3734 to catch quickly any cases where the caller uses the value
3735 and fails to set WANT_VALUE. */
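/* For example, in expanding a nested assignment such as
     x = y = z;
   the inner assignment is expanded with WANT_VALUE nonzero, so the
   value stored into Y is also returned (typically through a pseudo)
   for use as the operand of the outer assignment.  */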
3736
3737 rtx
3738 store_expr (exp, target, want_value)
3739 register tree exp;
3740 register rtx target;
3741 int want_value;
3742 {
3743 register rtx temp;
3744 int dont_return_target = 0;
3745
3746 if (TREE_CODE (exp) == COMPOUND_EXPR)
3747 {
3748 /* Perform first part of compound expression, then assign from second
3749 part. */
3750 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3751 emit_queue ();
3752 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3753 }
3754 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3755 {
3756 /* For a conditional expression, get a safe form of the target. Then
3757 test the condition, doing the appropriate assignment on either
3758 side. This avoids the creation of unnecessary temporaries.
3759 For non-BLKmode, it is more efficient not to do this. */
3760
3761 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3762
3763 emit_queue ();
3764 target = protect_from_queue (target, 1);
3765
3766 do_pending_stack_adjust ();
3767 NO_DEFER_POP;
3768 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3769 start_cleanup_deferral ();
3770 store_expr (TREE_OPERAND (exp, 1), target, 0);
3771 end_cleanup_deferral ();
3772 emit_queue ();
3773 emit_jump_insn (gen_jump (lab2));
3774 emit_barrier ();
3775 emit_label (lab1);
3776 start_cleanup_deferral ();
3777 store_expr (TREE_OPERAND (exp, 2), target, 0);
3778 end_cleanup_deferral ();
3779 emit_queue ();
3780 emit_label (lab2);
3781 OK_DEFER_POP;
3782
3783 return want_value ? target : NULL_RTX;
3784 }
3785 else if (queued_subexp_p (target))
3786 /* If target contains a postincrement, let's not risk
3787 using it as the place to generate the rhs. */
3788 {
3789 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3790 {
3791 /* Expand EXP into a new pseudo. */
3792 temp = gen_reg_rtx (GET_MODE (target));
3793 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3794 }
3795 else
3796 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3797
3798 /* If target is volatile, ANSI requires accessing the value
3799 *from* the target, if it is accessed. So make that happen.
3800 In no case return the target itself. */
3801 if (! MEM_VOLATILE_P (target) && want_value)
3802 dont_return_target = 1;
3803 }
3804 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3805 && GET_MODE (target) != BLKmode)
3806 /* If target is in memory and caller wants value in a register instead,
3807 arrange that. Pass TARGET as target for expand_expr so that,
3808 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3809 We know expand_expr will not use the target in that case.
3810 Don't do this if TARGET is volatile because we are supposed
3811 to write it and then read it. */
3812 {
3813 temp = expand_expr (exp, target, GET_MODE (target), 0);
3814 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3815 temp = copy_to_reg (temp);
3816 dont_return_target = 1;
3817 }
3818 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3819 /* If this is a scalar in a register that is stored in a wider mode
3820 than the declared mode, compute the result into its declared mode
3821 and then convert to the wider mode. Our value is the computed
3822 expression. */
3823 {
3824 /* If we don't want a value, we can do the conversion inside EXP,
3825 which will often result in some optimizations. Do the conversion
3826 in two steps: first change the signedness, if needed, then
3827 the extend. But don't do this if the type of EXP is a subtype
3828 of something else since then the conversion might involve
3829 more than just converting modes. */
3830 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3831 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3832 {
3833 if (TREE_UNSIGNED (TREE_TYPE (exp))
3834 != SUBREG_PROMOTED_UNSIGNED_P (target))
3835 exp
3836 = convert
3837 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3838 TREE_TYPE (exp)),
3839 exp);
3840
3841 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3842 SUBREG_PROMOTED_UNSIGNED_P (target)),
3843 exp);
3844 }
3845
3846 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3847
3848 /* If TEMP is a volatile MEM and we want a result value, make
3849 the access now so it gets done only once. Likewise if
3850 it contains TARGET. */
3851 if (GET_CODE (temp) == MEM && want_value
3852 && (MEM_VOLATILE_P (temp)
3853 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3854 temp = copy_to_reg (temp);
3855
3856 /* If TEMP is a VOIDmode constant, use convert_modes to make
3857 sure that we properly convert it. */
3858 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3859 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3860 TYPE_MODE (TREE_TYPE (exp)), temp,
3861 SUBREG_PROMOTED_UNSIGNED_P (target));
3862
3863 convert_move (SUBREG_REG (target), temp,
3864 SUBREG_PROMOTED_UNSIGNED_P (target));
3865
3866 /* If we promoted a constant, change the mode back down to match
3867 target. Otherwise, the caller might get confused by a result whose
3868 mode is larger than expected. */
3869
3870 if (want_value && GET_MODE (temp) != GET_MODE (target)
3871 && GET_MODE (temp) != VOIDmode)
3872 {
3873 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3874 SUBREG_PROMOTED_VAR_P (temp) = 1;
3875 SUBREG_PROMOTED_UNSIGNED_P (temp)
3876 = SUBREG_PROMOTED_UNSIGNED_P (target);
3877 }
3878
3879 return want_value ? temp : NULL_RTX;
3880 }
3881 else
3882 {
3883 temp = expand_expr (exp, target, GET_MODE (target), 0);
3884 /* Return TARGET if it's a specified hardware register.
3885 If TARGET is a volatile mem ref, either return TARGET
3886 or return a reg copied *from* TARGET; ANSI requires this.
3887
3888 Otherwise, if TEMP is not TARGET, return TEMP
3889 if it is constant (for efficiency),
3890 or if we really want the correct value. */
3891 if (!(target && GET_CODE (target) == REG
3892 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3893 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3894 && ! rtx_equal_p (temp, target)
3895 && (CONSTANT_P (temp) || want_value))
3896 dont_return_target = 1;
3897 }
3898
3899 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3900 the same as that of TARGET, adjust the constant. This is needed, for
3901 example, in case it is a CONST_DOUBLE and we want only a word-sized
3902 value. */
3903 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3904 && TREE_CODE (exp) != ERROR_MARK
3905 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3906 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3907 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3908
3909 if (current_function_check_memory_usage
3910 && GET_CODE (target) == MEM
3911 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3912 {
3913 in_check_memory_usage = 1;
3914 if (GET_CODE (temp) == MEM)
3915 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3916 XEXP (target, 0), Pmode,
3917 XEXP (temp, 0), Pmode,
3918 expr_size (exp), TYPE_MODE (sizetype));
3919 else
3920 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3921 XEXP (target, 0), Pmode,
3922 expr_size (exp), TYPE_MODE (sizetype),
3923 GEN_INT (MEMORY_USE_WO),
3924 TYPE_MODE (integer_type_node));
3925 in_check_memory_usage = 0;
3926 }
3927
3928 /* If value was not generated in the target, store it there.
3929 Convert the value to TARGET's type first if necessary. */
3930 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3931 one or both of them are volatile memory refs, we have to distinguish
3932 two cases:
3933 - expand_expr has used TARGET. In this case, we must not generate
3934 another copy. This can be detected by TARGET being equal according
3935 to == .
3936 - expand_expr has not used TARGET - that means that the source just
3937 happens to have the same RTX form. Since temp will have been created
3938 by expand_expr, it will compare unequal according to == .
3939 We must generate a copy in this case, to reach the correct number
3940 of volatile memory references. */
3941
3942 if ((! rtx_equal_p (temp, target)
3943 || (temp != target && (side_effects_p (temp)
3944 || side_effects_p (target))))
3945 && TREE_CODE (exp) != ERROR_MARK)
3946 {
3947 target = protect_from_queue (target, 1);
3948 if (GET_MODE (temp) != GET_MODE (target)
3949 && GET_MODE (temp) != VOIDmode)
3950 {
3951 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3952 if (dont_return_target)
3953 {
3954 /* In this case, we will return TEMP,
3955 so make sure it has the proper mode.
3956 But don't forget to store the value into TARGET. */
3957 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3958 emit_move_insn (target, temp);
3959 }
3960 else
3961 convert_move (target, temp, unsignedp);
3962 }
3963
3964 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3965 {
3966 /* Handle copying a string constant into an array.
3967 The string constant may be shorter than the array.
3968 So copy just the string's actual length, and clear the rest. */
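/* For example, for
     char buf[8] = "abc";
   the string constant occupies 4 bytes (including the terminating
   null), so the code below block-copies those 4 bytes into BUF and
   then clears the remaining 4 bytes with clear_storage.  */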
3969 rtx size;
3970 rtx addr;
3971
3972 /* Get the size of the data type of the string,
3973 which is actually the size of the target. */
3974 size = expr_size (exp);
3975 if (GET_CODE (size) == CONST_INT
3976 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3977 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3978 else
3979 {
3980 /* Compute the size of the data to copy from the string. */
3981 tree copy_size
3982 = size_binop (MIN_EXPR,
3983 make_tree (sizetype, size),
3984 size_int (TREE_STRING_LENGTH (exp)));
3985 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
3986 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3987 VOIDmode, 0);
3988 rtx label = 0;
3989
3990 /* Copy that much. */
3991 emit_block_move (target, temp, copy_size_rtx,
3992 TYPE_ALIGN (TREE_TYPE (exp)));
3993
3994 /* Figure out how much is left in TARGET that we have to clear.
3995 Do all calculations in ptr_mode. */
3996
3997 addr = XEXP (target, 0);
3998 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3999
4000 if (GET_CODE (copy_size_rtx) == CONST_INT)
4001 {
4002 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4003 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4004 align = MIN (align, (BITS_PER_UNIT
4005 * (INTVAL (copy_size_rtx)
4006 & - INTVAL (copy_size_rtx))));
4007 }
4008 else
4009 {
4010 addr = force_reg (ptr_mode, addr);
4011 addr = expand_binop (ptr_mode, add_optab, addr,
4012 copy_size_rtx, NULL_RTX, 0,
4013 OPTAB_LIB_WIDEN);
4014
4015 size = expand_binop (ptr_mode, sub_optab, size,
4016 copy_size_rtx, NULL_RTX, 0,
4017 OPTAB_LIB_WIDEN);
4018
4019 align = BITS_PER_UNIT;
4020 label = gen_label_rtx ();
4021 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4022 GET_MODE (size), 0, 0, label);
4023 }
4024 align = MIN (align, expr_align (copy_size));
4025
4026 if (size != const0_rtx)
4027 {
4028 rtx dest = gen_rtx_MEM (BLKmode, addr);
4029
4030 MEM_COPY_ATTRIBUTES (dest, target);
4031
4032 /* Be sure we can write on ADDR. */
4033 in_check_memory_usage = 1;
4034 if (current_function_check_memory_usage)
4035 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
4036 addr, Pmode,
4037 size, TYPE_MODE (sizetype),
4038 GEN_INT (MEMORY_USE_WO),
4039 TYPE_MODE (integer_type_node));
4040 in_check_memory_usage = 0;
4041 clear_storage (dest, size, align);
4042 }
4043
4044 if (label)
4045 emit_label (label);
4046 }
4047 }
4048 /* Handle calls that return values in multiple non-contiguous locations.
4049 The Irix 6 ABI has examples of this. */
4050 else if (GET_CODE (target) == PARALLEL)
4051 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4052 TYPE_ALIGN (TREE_TYPE (exp)));
4053 else if (GET_MODE (temp) == BLKmode)
4054 emit_block_move (target, temp, expr_size (exp),
4055 TYPE_ALIGN (TREE_TYPE (exp)));
4056 else
4057 emit_move_insn (target, temp);
4058 }
4059
4060 /* If we don't want a value, return NULL_RTX. */
4061 if (! want_value)
4062 return NULL_RTX;
4063
4064 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4065 ??? The latter test doesn't seem to make sense. */
4066 else if (dont_return_target && GET_CODE (temp) != MEM)
4067 return temp;
4068
4069 /* Return TARGET itself if it is a hard register. */
4070 else if (want_value && GET_MODE (target) != BLKmode
4071 && ! (GET_CODE (target) == REG
4072 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4073 return copy_to_reg (target);
4074
4075 else
4076 return target;
4077 }
4078 \f
4079 /* Return 1 if EXP just contains zeros. */
4080
4081 static int
4082 is_zeros_p (exp)
4083 tree exp;
4084 {
4085 tree elt;
4086
4087 switch (TREE_CODE (exp))
4088 {
4089 case CONVERT_EXPR:
4090 case NOP_EXPR:
4091 case NON_LVALUE_EXPR:
4092 return is_zeros_p (TREE_OPERAND (exp, 0));
4093
4094 case INTEGER_CST:
4095 return integer_zerop (exp);
4096
4097 case COMPLEX_CST:
4098 return
4099 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4100
4101 case REAL_CST:
4102 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4103
4104 case CONSTRUCTOR:
4105 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4106 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4107 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4108 if (! is_zeros_p (TREE_VALUE (elt)))
4109 return 0;
4110
4111 return 1;
4112
4113 default:
4114 return 0;
4115 }
4116 }
4117
4118 /* Return 1 if EXP contains mostly (3/4) zeros. */
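/* The test used below is 4 * ZEROS >= 3 * ELTS; e.g. a constructor
   with 16 elements is considered mostly zero when at least 12 of
   them are themselves (mostly) zero.  */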
4119
4120 static int
4121 mostly_zeros_p (exp)
4122 tree exp;
4123 {
4124 if (TREE_CODE (exp) == CONSTRUCTOR)
4125 {
4126 int elts = 0, zeros = 0;
4127 tree elt = CONSTRUCTOR_ELTS (exp);
4128 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4129 {
4130 /* If there are no ranges of true bits, it is all zero. */
4131 return elt == NULL_TREE;
4132 }
4133 for (; elt; elt = TREE_CHAIN (elt))
4134 {
4135 /* We do not handle the case where the index is a RANGE_EXPR,
4136 so the statistic will be somewhat inaccurate.
4137 We do make a more accurate count in store_constructor itself,
4138 and since this function is only used for nested array elements,
4139 this should be close enough. */
4140 if (mostly_zeros_p (TREE_VALUE (elt)))
4141 zeros++;
4142 elts++;
4143 }
4144
4145 return 4 * zeros >= 3 * elts;
4146 }
4147
4148 return is_zeros_p (exp);
4149 }
4150 \f
4151 /* Helper function for store_constructor.
4152 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4153 TYPE is the type of the CONSTRUCTOR, not the element type.
4154 ALIGN and CLEARED are as for store_constructor.
4155
4156 This provides a recursive shortcut back to store_constructor when it isn't
4157 necessary to go through store_field. This is so that we can pass through
4158 the cleared field to let store_constructor know that we may not have to
4159 clear a substructure if the outer structure has already been cleared. */
4160
4161 static void
4162 store_constructor_field (target, bitsize, bitpos,
4163 mode, exp, type, align, cleared)
4164 rtx target;
4165 unsigned HOST_WIDE_INT bitsize;
4166 HOST_WIDE_INT bitpos;
4167 enum machine_mode mode;
4168 tree exp, type;
4169 unsigned int align;
4170 int cleared;
4171 {
4172 if (TREE_CODE (exp) == CONSTRUCTOR
4173 && bitpos % BITS_PER_UNIT == 0
4174 /* If we have a non-zero bitpos for a register target, then we just
4175 let store_field do the bitfield handling. This is unlikely to
4176 generate unnecessary clear instructions anyway. */
4177 && (bitpos == 0 || GET_CODE (target) == MEM))
4178 {
4179 if (bitpos != 0)
4180 target
4181 = change_address (target,
4182 GET_MODE (target) == BLKmode
4183 || 0 != (bitpos
4184 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4185 ? BLKmode : VOIDmode,
4186 plus_constant (XEXP (target, 0),
4187 bitpos / BITS_PER_UNIT));
4188 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4189 }
4190 else
4191 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4192 int_size_in_bytes (type), 0);
4193 }
4194
4195 /* Store the value of constructor EXP into the rtx TARGET.
4196 TARGET is either a REG or a MEM.
4197 ALIGN is the maximum known alignment for TARGET.
4198 CLEARED is true if TARGET is known to have been zero'd.
4199 SIZE is the number of bytes of TARGET we are allowed to modify: this
4200 may not be the same as the size of EXP if we are assigning to a field
4201 which has been packed to exclude padding bits. */
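/* For example, for
     struct { int a, b; } s = { 1 };
   the constructor has fewer elements than the type has fields, so
   the RECORD_TYPE case below clears the whole of S first and then
   stores the constant 1 into field A.  */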
4202
4203 static void
4204 store_constructor (exp, target, align, cleared, size)
4205 tree exp;
4206 rtx target;
4207 unsigned int align;
4208 int cleared;
4209 HOST_WIDE_INT size;
4210 {
4211 tree type = TREE_TYPE (exp);
4212 #ifdef WORD_REGISTER_OPERATIONS
4213 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4214 #endif
4215
4216 /* We know our target cannot conflict, since safe_from_p has been called. */
4217 #if 0
4218 /* Don't try copying piece by piece into a hard register
4219 since that is vulnerable to being clobbered by EXP.
4220 Instead, construct in a pseudo register and then copy it all. */
4221 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4222 {
4223 rtx temp = gen_reg_rtx (GET_MODE (target));
4224 store_constructor (exp, temp, align, cleared, size);
4225 emit_move_insn (target, temp);
4226 return;
4227 }
4228 #endif
4229
4230 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4231 || TREE_CODE (type) == QUAL_UNION_TYPE)
4232 {
4233 register tree elt;
4234
4235 /* Inform later passes that the whole union value is dead. */
4236 if ((TREE_CODE (type) == UNION_TYPE
4237 || TREE_CODE (type) == QUAL_UNION_TYPE)
4238 && ! cleared)
4239 {
4240 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4241
4242 /* If the constructor is empty, clear the union. */
4243 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4244 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4245 }
4246
4247 /* If we are building a static constructor into a register,
4248 set the initial value as zero so we can fold the value into
4249 a constant. But if more than one register is involved,
4250 this probably loses. */
4251 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4252 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4253 {
4254 if (! cleared)
4255 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4256
4257 cleared = 1;
4258 }
4259
4260 /* If the constructor has fewer fields than the structure
4261 or if we are initializing the structure to mostly zeros,
4262 clear the whole structure first. */
4263 else if (size > 0
4264 && ((list_length (CONSTRUCTOR_ELTS (exp))
4265 != fields_length (type))
4266 || mostly_zeros_p (exp)))
4267 {
4268 if (! cleared)
4269 clear_storage (target, GEN_INT (size), align);
4270
4271 cleared = 1;
4272 }
4273 else if (! cleared)
4274 /* Inform later passes that the old value is dead. */
4275 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4276
4277 /* Store each element of the constructor into
4278 the corresponding field of TARGET. */
4279
4280 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4281 {
4282 register tree field = TREE_PURPOSE (elt);
4283 #ifdef WORD_REGISTER_OPERATIONS
4284 tree value = TREE_VALUE (elt);
4285 #endif
4286 register enum machine_mode mode;
4287 HOST_WIDE_INT bitsize;
4288 HOST_WIDE_INT bitpos = 0;
4289 int unsignedp;
4290 tree offset;
4291 rtx to_rtx = target;
4292
4293 /* Just ignore missing fields.
4294 We cleared the whole structure, above,
4295 if any fields are missing. */
4296 if (field == 0)
4297 continue;
4298
4299 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4300 continue;
4301
4302 if (host_integerp (DECL_SIZE (field), 1))
4303 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4304 else
4305 bitsize = -1;
4306
4307 unsignedp = TREE_UNSIGNED (field);
4308 mode = DECL_MODE (field);
4309 if (DECL_BIT_FIELD (field))
4310 mode = VOIDmode;
4311
4312 offset = DECL_FIELD_OFFSET (field);
4313 if (host_integerp (offset, 0)
4314 && host_integerp (bit_position (field), 0))
4315 {
4316 bitpos = int_bit_position (field);
4317 offset = 0;
4318 }
4319 else
4320 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4321
4322 if (offset)
4323 {
4324 rtx offset_rtx;
4325
4326 if (contains_placeholder_p (offset))
4327 offset = build (WITH_RECORD_EXPR, sizetype,
4328 offset, make_tree (TREE_TYPE (exp), target));
4329
4330 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4331 if (GET_CODE (to_rtx) != MEM)
4332 abort ();
4333
4334 if (GET_MODE (offset_rtx) != ptr_mode)
4335 {
4336 #ifdef POINTERS_EXTEND_UNSIGNED
4337 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4338 #else
4339 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4340 #endif
4341 }
4342
4343 to_rtx
4344 = change_address (to_rtx, VOIDmode,
4345 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4346 force_reg (ptr_mode,
4347 offset_rtx)));
4348 align = DECL_OFFSET_ALIGN (field);
4349 }
4350
4351 if (TREE_READONLY (field))
4352 {
4353 if (GET_CODE (to_rtx) == MEM)
4354 to_rtx = copy_rtx (to_rtx);
4355
4356 RTX_UNCHANGING_P (to_rtx) = 1;
4357 }
4358
4359 #ifdef WORD_REGISTER_OPERATIONS
4360 /* If this initializes a field that is smaller than a word, at the
4361 start of a word, try to widen it to a full word.
4362 This special case allows us to output C++ member function
4363 initializations in a form that the optimizers can understand. */
4364 if (GET_CODE (target) == REG
4365 && bitsize < BITS_PER_WORD
4366 && bitpos % BITS_PER_WORD == 0
4367 && GET_MODE_CLASS (mode) == MODE_INT
4368 && TREE_CODE (value) == INTEGER_CST
4369 && exp_size >= 0
4370 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4371 {
4372 tree type = TREE_TYPE (value);
4373 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4374 {
4375 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4376 value = convert (type, value);
4377 }
4378 if (BYTES_BIG_ENDIAN)
4379 value
4380 = fold (build (LSHIFT_EXPR, type, value,
4381 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4382 bitsize = BITS_PER_WORD;
4383 mode = word_mode;
4384 }
4385 #endif
4386 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4387 TREE_VALUE (elt), type, align, cleared);
4388 }
4389 }
4390 else if (TREE_CODE (type) == ARRAY_TYPE)
4391 {
4392 register tree elt;
4393 register int i;
4394 int need_to_clear;
4395 tree domain = TYPE_DOMAIN (type);
4396 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4397 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4398 tree elttype = TREE_TYPE (type);
4399
4400 /* If the constructor has fewer elements than the array,
4401 clear the whole array first. Similarly if this is
4402 a static constructor of a non-BLKmode object. */
4403 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4404 need_to_clear = 1;
4405 else
4406 {
4407 HOST_WIDE_INT count = 0, zero_count = 0;
4408 need_to_clear = 0;
4409 /* This loop is a more accurate version of the loop in
4410 mostly_zeros_p (it handles RANGE_EXPR in an index).
4411 It is also needed to check for missing elements. */
4412 for (elt = CONSTRUCTOR_ELTS (exp);
4413 elt != NULL_TREE;
4414 elt = TREE_CHAIN (elt))
4415 {
4416 tree index = TREE_PURPOSE (elt);
4417 HOST_WIDE_INT this_node_count;
4418
4419 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4420 {
4421 tree lo_index = TREE_OPERAND (index, 0);
4422 tree hi_index = TREE_OPERAND (index, 1);
4423
4424 if (! host_integerp (lo_index, 1)
4425 || ! host_integerp (hi_index, 1))
4426 {
4427 need_to_clear = 1;
4428 break;
4429 }
4430
4431 this_node_count = (tree_low_cst (hi_index, 1)
4432 - tree_low_cst (lo_index, 1) + 1);
4433 }
4434 else
4435 this_node_count = 1;
4436 count += this_node_count;
4437 if (mostly_zeros_p (TREE_VALUE (elt)))
4438 zero_count += this_node_count;
4439 }
4440 /* Clear the entire array first if there are any missing elements,
4441 or if the incidence of zero elements is >= 75%. */
4442 if (count < maxelt - minelt + 1
4443 || 4 * zero_count >= 3 * count)
4444 need_to_clear = 1;
4445 }
4446 if (need_to_clear && size > 0)
4447 {
4448 if (! cleared)
4449 clear_storage (target, GEN_INT (size), align);
4450 cleared = 1;
4451 }
4452 else
4453 /* Inform later passes that the old value is dead. */
4454 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4455
4456 /* Store each element of the constructor into
4457 the corresponding element of TARGET, determined
4458 by counting the elements. */
4459 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4460 elt;
4461 elt = TREE_CHAIN (elt), i++)
4462 {
4463 register enum machine_mode mode;
4464 HOST_WIDE_INT bitsize;
4465 HOST_WIDE_INT bitpos;
4466 int unsignedp;
4467 tree value = TREE_VALUE (elt);
4468 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4469 tree index = TREE_PURPOSE (elt);
4470 rtx xtarget = target;
4471
4472 if (cleared && is_zeros_p (value))
4473 continue;
4474
4475 unsignedp = TREE_UNSIGNED (elttype);
4476 mode = TYPE_MODE (elttype);
4477 if (mode == BLKmode)
4478 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4479 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4480 : -1);
4481 else
4482 bitsize = GET_MODE_BITSIZE (mode);
4483
4484 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4485 {
4486 tree lo_index = TREE_OPERAND (index, 0);
4487 tree hi_index = TREE_OPERAND (index, 1);
4488 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4489 struct nesting *loop;
4490 HOST_WIDE_INT lo, hi, count;
4491 tree position;
4492
4493 /* If the range is constant and "small", unroll the loop. */
4494 if (host_integerp (lo_index, 0)
4495 && host_integerp (hi_index, 0)
4496 && (lo = tree_low_cst (lo_index, 0),
4497 hi = tree_low_cst (hi_index, 0),
4498 count = hi - lo + 1,
4499 (GET_CODE (target) != MEM
4500 || count <= 2
4501 || (host_integerp (TYPE_SIZE (elttype), 1)
4502 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4503 <= 40 * 8)))))
4504 {
4505 lo -= minelt; hi -= minelt;
4506 for (; lo <= hi; lo++)
4507 {
4508 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4509 store_constructor_field (target, bitsize, bitpos, mode,
4510 value, type, align, cleared);
4511 }
4512 }
4513 else
4514 {
4515 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4516 loop_top = gen_label_rtx ();
4517 loop_end = gen_label_rtx ();
4518
4519 unsignedp = TREE_UNSIGNED (domain);
4520
4521 index = build_decl (VAR_DECL, NULL_TREE, domain);
4522
4523 DECL_RTL (index) = index_r
4524 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4525 &unsignedp, 0));
4526
4527 if (TREE_CODE (value) == SAVE_EXPR
4528 && SAVE_EXPR_RTL (value) == 0)
4529 {
4530 /* Make sure value gets expanded once before the
4531 loop. */
4532 expand_expr (value, const0_rtx, VOIDmode, 0);
4533 emit_queue ();
4534 }
4535 store_expr (lo_index, index_r, 0);
4536 loop = expand_start_loop (0);
4537
4538 /* Assign value to element index. */
4539 position
4540 = convert (ssizetype,
4541 fold (build (MINUS_EXPR, TREE_TYPE (index),
4542 index, TYPE_MIN_VALUE (domain))));
4543 position = size_binop (MULT_EXPR, position,
4544 convert (ssizetype,
4545 TYPE_SIZE_UNIT (elttype)));
4546
4547 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4548 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4549 xtarget = change_address (target, mode, addr);
4550 if (TREE_CODE (value) == CONSTRUCTOR)
4551 store_constructor (value, xtarget, align, cleared,
4552 bitsize / BITS_PER_UNIT);
4553 else
4554 store_expr (value, xtarget, 0);
4555
4556 expand_exit_loop_if_false (loop,
4557 build (LT_EXPR, integer_type_node,
4558 index, hi_index));
4559
4560 expand_increment (build (PREINCREMENT_EXPR,
4561 TREE_TYPE (index),
4562 index, integer_one_node), 0, 0);
4563 expand_end_loop ();
4564 emit_label (loop_end);
4565 }
4566 }
4567 else if ((index != 0 && ! host_integerp (index, 0))
4568 || ! host_integerp (TYPE_SIZE (elttype), 1))
4569 {
4570 rtx pos_rtx, addr;
4571 tree position;
4572
4573 if (index == 0)
4574 index = ssize_int (1);
4575
4576 if (minelt)
4577 index = convert (ssizetype,
4578 fold (build (MINUS_EXPR, index,
4579 TYPE_MIN_VALUE (domain))));
4580
4581 position = size_binop (MULT_EXPR, index,
4582 convert (ssizetype,
4583 TYPE_SIZE_UNIT (elttype)));
4584 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4585 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4586 xtarget = change_address (target, mode, addr);
4587 store_expr (value, xtarget, 0);
4588 }
4589 else
4590 {
4591 if (index != 0)
4592 bitpos = ((tree_low_cst (index, 0) - minelt)
4593 * tree_low_cst (TYPE_SIZE (elttype), 1));
4594 else
4595 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4596
4597 store_constructor_field (target, bitsize, bitpos, mode, value,
4598 type, align, cleared);
4599 }
4600 }
4601 }
4602
4603 /* Set constructor assignments. */
4604 else if (TREE_CODE (type) == SET_TYPE)
4605 {
4606 tree elt = CONSTRUCTOR_ELTS (exp);
4607 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4608 tree domain = TYPE_DOMAIN (type);
4609 tree domain_min, domain_max, bitlength;
4610
4611 /* The default implementation strategy is to extract the constant
4612 parts of the constructor, use that to initialize the target,
4613 and then "or" in whatever non-constant ranges we need in addition.
4614
4615 If a large set is all zero or all ones, it is
4616 probably better to set it using memset (if available) or bzero.
4617 Also, if a large set has just a single range, it may also be
4618 better to first clear the whole set (using
4619 bzero/memset), and then set the bits we want. */
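      /* For example, with the usual little-endian bit numbering, a
         constant set constructor whose members are 1, 3, 4 and 5 over
         a small domain is gathered into BIT_BUFFER below and emitted
         as the single word 0x3a; any remaining non-constant ranges are
         handled by the loop further down via memset or __setbits.  */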
4620
4621 /* Check for all zeros. */
4622 if (elt == NULL_TREE && size > 0)
4623 {
4624 if (!cleared)
4625 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4626 return;
4627 }
4628
4629 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4630 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4631 bitlength = size_binop (PLUS_EXPR,
4632 size_diffop (domain_max, domain_min),
4633 ssize_int (1));
4634
4635 nbits = tree_low_cst (bitlength, 1);
4636
4637 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4638 are "complicated" (more than one range), initialize (the
4639 constant parts) by copying from a constant. */
4640 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4641 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4642 {
4643 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4644 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4645 char *bit_buffer = (char *) alloca (nbits);
4646 HOST_WIDE_INT word = 0;
4647 unsigned int bit_pos = 0;
4648 unsigned int ibit = 0;
4649 unsigned int offset = 0; /* In bytes from beginning of set. */
4650
4651 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4652 for (;;)
4653 {
4654 if (bit_buffer[ibit])
4655 {
4656 if (BYTES_BIG_ENDIAN)
4657 word |= (1 << (set_word_size - 1 - bit_pos));
4658 else
4659 word |= 1 << bit_pos;
4660 }
4661
4662 bit_pos++; ibit++;
4663 if (bit_pos >= set_word_size || ibit == nbits)
4664 {
4665 if (word != 0 || ! cleared)
4666 {
4667 rtx datum = GEN_INT (word);
4668 rtx to_rtx;
4669
4670 /* The assumption here is that it is safe to use
4671 XEXP if the set is multi-word, but not if
4672 it's single-word. */
4673 if (GET_CODE (target) == MEM)
4674 {
4675 to_rtx = plus_constant (XEXP (target, 0), offset);
4676 to_rtx = change_address (target, mode, to_rtx);
4677 }
4678 else if (offset == 0)
4679 to_rtx = target;
4680 else
4681 abort ();
4682 emit_move_insn (to_rtx, datum);
4683 }
4684
4685 if (ibit == nbits)
4686 break;
4687 word = 0;
4688 bit_pos = 0;
4689 offset += set_word_size / BITS_PER_UNIT;
4690 }
4691 }
4692 }
4693 else if (!cleared)
4694 /* Don't bother clearing storage if the set is all ones. */
4695 if (TREE_CHAIN (elt) != NULL_TREE
4696 || (TREE_PURPOSE (elt) == NULL_TREE
4697 ? nbits != 1
4698 : ( ! host_integerp (TREE_VALUE (elt), 0)
4699 || ! host_integerp (TREE_PURPOSE (elt), 0)
4700 || (tree_low_cst (TREE_VALUE (elt), 0)
4701 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4702 != (HOST_WIDE_INT) nbits))))
4703 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4704
4705 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4706 {
4707 /* Start of range of element or NULL. */
4708 tree startbit = TREE_PURPOSE (elt);
4709 /* End of range of element, or element value. */
4710 tree endbit = TREE_VALUE (elt);
4711 #ifdef TARGET_MEM_FUNCTIONS
4712 HOST_WIDE_INT startb, endb;
4713 #endif
4714 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4715
4716 bitlength_rtx = expand_expr (bitlength,
4717 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4718
4719 /* Handle non-range tuple element like [ expr ]. */
4720 if (startbit == NULL_TREE)
4721 {
4722 startbit = save_expr (endbit);
4723 endbit = startbit;
4724 }
4725
4726 startbit = convert (sizetype, startbit);
4727 endbit = convert (sizetype, endbit);
4728 if (! integer_zerop (domain_min))
4729 {
4730 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4731 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4732 }
4733 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4734 EXPAND_CONST_ADDRESS);
4735 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4736 EXPAND_CONST_ADDRESS);
4737
4738 if (REG_P (target))
4739 {
4740 targetx = assign_stack_temp (GET_MODE (target),
4741 GET_MODE_SIZE (GET_MODE (target)),
4742 0);
4743 emit_move_insn (targetx, target);
4744 }
4745
4746 else if (GET_CODE (target) == MEM)
4747 targetx = target;
4748 else
4749 abort ();
4750
4751 #ifdef TARGET_MEM_FUNCTIONS
4752 /* Optimization: If startbit and endbit are
4753 constants divisible by BITS_PER_UNIT,
4754 call memset instead. */
4755 if (TREE_CODE (startbit) == INTEGER_CST
4756 && TREE_CODE (endbit) == INTEGER_CST
4757 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4758 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4759 {
4760 emit_library_call (memset_libfunc, 0,
4761 VOIDmode, 3,
4762 plus_constant (XEXP (targetx, 0),
4763 startb / BITS_PER_UNIT),
4764 Pmode,
4765 constm1_rtx, TYPE_MODE (integer_type_node),
4766 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4767 TYPE_MODE (sizetype));
4768 }
4769 else
4770 #endif
4771 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4772 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4773 bitlength_rtx, TYPE_MODE (sizetype),
4774 startbit_rtx, TYPE_MODE (sizetype),
4775 endbit_rtx, TYPE_MODE (sizetype));
4776
4777 if (REG_P (target))
4778 emit_move_insn (target, targetx);
4779 }
4780 }
4781
4782 else
4783 abort ();
4784 }
4785
4786 /* Store the value of EXP (an expression tree)
4787 into a subfield of TARGET which has mode MODE and occupies
4788 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4789 If MODE is VOIDmode, it means that we are storing into a bit-field.
4790
4791 If VALUE_MODE is VOIDmode, return nothing in particular.
4792 UNSIGNEDP is not used in this case.
4793
4794 Otherwise, return an rtx for the value stored. This rtx
4795 has mode VALUE_MODE if that is convenient to do.
4796 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4797
4798 ALIGN is the alignment that TARGET is known to have.
4799 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4800
4801 ALIAS_SET is the alias set for the destination. This value will
4802 (in general) be different from that for TARGET, since TARGET is a
4803 reference to the containing structure. */
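/* For example, for
     struct { unsigned int b : 3; } s;
     s.b = v;
   this is reached with BITSIZE 3, BITPOS 0 and MODE VOIDmode, so the
   store is done with store_bit_field rather than through an ordinary
   memory reference.  */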
4804
4805 static rtx
4806 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4807 unsignedp, align, total_size, alias_set)
4808 rtx target;
4809 HOST_WIDE_INT bitsize;
4810 HOST_WIDE_INT bitpos;
4811 enum machine_mode mode;
4812 tree exp;
4813 enum machine_mode value_mode;
4814 int unsignedp;
4815 unsigned int align;
4816 HOST_WIDE_INT total_size;
4817 int alias_set;
4818 {
4819 HOST_WIDE_INT width_mask = 0;
4820
4821 if (TREE_CODE (exp) == ERROR_MARK)
4822 return const0_rtx;
4823
4824 if (bitsize < HOST_BITS_PER_WIDE_INT)
4825 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4826
4827 /* If we are storing into an unaligned field of an aligned union that is
4828 in a register, we may have the mode of TARGET being an integer mode but
4829 MODE == BLKmode. In that case, get an aligned object whose size and
4830 alignment are the same as TARGET and store TARGET into it (we can avoid
4831 the store if the field being stored is the entire width of TARGET). Then
4832 call ourselves recursively to store the field into a BLKmode version of
4833 that object. Finally, load from the object into TARGET. This is not
4834 very efficient in general, but should only be slightly more expensive
4835 than the otherwise-required unaligned accesses. Perhaps this can be
4836 cleaned up later. */
4837
4838 if (mode == BLKmode
4839 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4840 {
4841 rtx object = assign_stack_temp (GET_MODE (target),
4842 GET_MODE_SIZE (GET_MODE (target)), 0);
4843 rtx blk_object = copy_rtx (object);
4844
4845 MEM_SET_IN_STRUCT_P (object, 1);
4846 MEM_SET_IN_STRUCT_P (blk_object, 1);
4847 PUT_MODE (blk_object, BLKmode);
4848
4849 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4850 emit_move_insn (object, target);
4851
4852 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4853 align, total_size, alias_set);
4854
4855 /* Even though we aren't returning target, we need to
4856 give it the updated value. */
4857 emit_move_insn (target, object);
4858
4859 return blk_object;
4860 }
4861
4862 if (GET_CODE (target) == CONCAT)
4863 {
4864 /* We're storing into a struct containing a single __complex. */
4865
4866 if (bitpos != 0)
4867 abort ();
4868 return store_expr (exp, target, 0);
4869 }
4870
4871 /* If the structure is in a register or if the component
4872 is a bit field, we cannot use addressing to access it.
4873 Use bit-field techniques or SUBREG to store in it. */
4874
4875 if (mode == VOIDmode
4876 || (mode != BLKmode && ! direct_store[(int) mode]
4877 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4878 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4879 || GET_CODE (target) == REG
4880 || GET_CODE (target) == SUBREG
4881 /* If the field isn't aligned enough to store as an ordinary memref,
4882 store it as a bit field. */
4883 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4884 && (align < GET_MODE_ALIGNMENT (mode)
4885 || bitpos % GET_MODE_ALIGNMENT (mode)))
4886 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4887 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4888 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4889 /* If the RHS and field are a constant size and the size of the
4890 RHS isn't the same size as the bitfield, we must use bitfield
4891 operations. */
4892 || (bitsize >= 0
4893 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4894 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4895 {
4896 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4897
4898 /* If BITSIZE is narrower than the size of the type of EXP
4899 we will be narrowing TEMP. Normally, what's wanted are the
4900 low-order bits. However, if EXP's type is a record and this is
4901 a big-endian machine, we want the upper BITSIZE bits. */
4902 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4903 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4904 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4905 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4906 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4907 - bitsize),
4908 temp, 1);
4909
4910 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4911 MODE. */
4912 if (mode != VOIDmode && mode != BLKmode
4913 && mode != TYPE_MODE (TREE_TYPE (exp)))
4914 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4915
4916 /* If the modes of TARGET and TEMP are both BLKmode, both
4917 must be in memory and BITPOS must be aligned on a byte
4918 boundary. If so, we simply do a block copy. */
4919 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4920 {
4921 unsigned int exp_align = expr_align (exp);
4922
4923 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4924 || bitpos % BITS_PER_UNIT != 0)
4925 abort ();
4926
4927 target = change_address (target, VOIDmode,
4928 plus_constant (XEXP (target, 0),
4929 bitpos / BITS_PER_UNIT));
4930
4931 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4932 align = MIN (exp_align, align);
4933
4934 /* Find an alignment that is consistent with the bit position. */
4935 while ((bitpos % align) != 0)
4936 align >>= 1;
4937
4938 emit_block_move (target, temp,
4939 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4940 / BITS_PER_UNIT),
4941 align);
4942
4943 return value_mode == VOIDmode ? const0_rtx : target;
4944 }
4945
4946 /* Store the value in the bitfield. */
4947 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4948 if (value_mode != VOIDmode)
4949 {
4950 /* The caller wants an rtx for the value. */
4951 /* If possible, avoid refetching from the bitfield itself. */
4952 if (width_mask != 0
4953 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4954 {
4955 tree count;
4956 enum machine_mode tmode;
4957
4958 if (unsignedp)
4959 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4960 tmode = GET_MODE (temp);
4961 if (tmode == VOIDmode)
4962 tmode = value_mode;
4963 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4964 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4965 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4966 }
4967 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4968 NULL_RTX, value_mode, 0, align,
4969 total_size);
4970 }
4971 return const0_rtx;
4972 }
4973 else
4974 {
4975 rtx addr = XEXP (target, 0);
4976 rtx to_rtx;
4977
4978 /* If a value is wanted, it must be the lhs;
4979 so make the address stable for multiple use. */
4980
4981 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4982 && ! CONSTANT_ADDRESS_P (addr)
4983 /* A frame-pointer reference is already stable. */
4984 && ! (GET_CODE (addr) == PLUS
4985 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4986 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4987 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4988 addr = copy_to_reg (addr);
4989
4990 /* Now build a reference to just the desired component. */
4991
4992 to_rtx = copy_rtx (change_address (target, mode,
4993 plus_constant (addr,
4994 (bitpos
4995 / BITS_PER_UNIT))));
4996 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4997 MEM_ALIAS_SET (to_rtx) = alias_set;
4998
4999 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5000 }
5001 }
5002 \f
5003 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5004 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5005 ARRAY_REFs and find the ultimate containing object, which we return.
5006
5007 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5008 bit position, and *PUNSIGNEDP to the signedness of the field.
5009 If the position of the field is variable, we store a tree
5010 giving the variable offset (in units) in *POFFSET.
5011 This offset is in addition to the bit position.
5012 If the position is not variable, we store 0 in *POFFSET.
5013 We set *PALIGNMENT to the alignment of the address that will be
5014 computed. This is the alignment of the thing we return if *POFFSET
5015 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5016
5017 If any of the extraction expressions is volatile,
5018 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5019
5020 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5021 is a mode that can be used to access the field. In that case, *PBITSIZE
5022 is redundant.
5023
5024 If the field describes a variable-sized object, *PMODE is set to
5025 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5026 this case, but the address of the object can be found. */
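/* For example, for a COMPONENT_REF X.A where field A starts 4 bytes
   into X, this returns the tree for X and sets *PBITPOS to 32,
   *POFFSET to 0 and *PBITSIZE to the width of A in bits; for an
   ARRAY_REF with a variable index, the variable part of the position
   is returned in *POFFSET (in units) instead.  */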
5027
5028 tree
5029 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5030 punsignedp, pvolatilep, palignment)
5031 tree exp;
5032 HOST_WIDE_INT *pbitsize;
5033 HOST_WIDE_INT *pbitpos;
5034 tree *poffset;
5035 enum machine_mode *pmode;
5036 int *punsignedp;
5037 int *pvolatilep;
5038 unsigned int *palignment;
5039 {
5040 tree size_tree = 0;
5041 enum machine_mode mode = VOIDmode;
5042 tree offset = size_zero_node;
5043 tree bit_offset = bitsize_zero_node;
5044 unsigned int alignment = BIGGEST_ALIGNMENT;
5045 tree tem;
5046
5047 /* First get the mode, signedness, and size. We do this from just the
5048 outermost expression. */
5049 if (TREE_CODE (exp) == COMPONENT_REF)
5050 {
5051 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5052 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5053 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5054
5055 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5056 }
5057 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5058 {
5059 size_tree = TREE_OPERAND (exp, 1);
5060 *punsignedp = TREE_UNSIGNED (exp);
5061 }
5062 else
5063 {
5064 mode = TYPE_MODE (TREE_TYPE (exp));
5065 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5066
5067 if (mode == BLKmode)
5068 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5069 else
5070 *pbitsize = GET_MODE_BITSIZE (mode);
5071 }
5072
5073 if (size_tree != 0)
5074 {
5075 if (! host_integerp (size_tree, 1))
5076 mode = BLKmode, *pbitsize = -1;
5077 else
5078 *pbitsize = tree_low_cst (size_tree, 1);
5079 }
5080
5081 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5082 and find the ultimate containing object. */
5083 while (1)
5084 {
5085 if (TREE_CODE (exp) == BIT_FIELD_REF)
5086 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5087 else if (TREE_CODE (exp) == COMPONENT_REF)
5088 {
5089 tree field = TREE_OPERAND (exp, 1);
5090 tree this_offset = DECL_FIELD_OFFSET (field);
5091
5092 /* If this field hasn't been filled in yet, don't go
5093 past it. This should only happen when folding expressions
5094 made during type construction. */
5095 if (this_offset == 0)
5096 break;
5097 else if (! TREE_CONSTANT (this_offset)
5098 && contains_placeholder_p (this_offset))
5099 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5100
5101 offset = size_binop (PLUS_EXPR, offset, this_offset);
5102 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5103 DECL_FIELD_BIT_OFFSET (field));
5104
5105 if (! host_integerp (offset, 0))
5106 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5107 }
5108
5109 else if (TREE_CODE (exp) == ARRAY_REF)
5110 {
5111 tree index = TREE_OPERAND (exp, 1);
5112 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5113 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5114 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5115
5116 /* We assume all arrays have sizes that are a multiple of a byte.
5117 First subtract the lower bound, if any, in the type of the
5118 index, then convert to sizetype and multiply by the size of the
5119 array element. */
5120 if (low_bound != 0 && ! integer_zerop (low_bound))
5121 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5122 index, low_bound));
5123
5124 /* If the index has a self-referential type, pass it to a
5125 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5126 component to one. */
5127 if (! TREE_CONSTANT (index)
5128 && contains_placeholder_p (index))
5129 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5130 if (! TREE_CONSTANT (unit_size)
5131 && contains_placeholder_p (unit_size))
5132 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5133 TREE_OPERAND (exp, 0));
5134
5135 offset = size_binop (PLUS_EXPR, offset,
5136 size_binop (MULT_EXPR,
5137 convert (sizetype, index),
5138 unit_size));
5139 }
5140
5141 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5142 && ! ((TREE_CODE (exp) == NOP_EXPR
5143 || TREE_CODE (exp) == CONVERT_EXPR)
5144 && (TYPE_MODE (TREE_TYPE (exp))
5145 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5146 break;
5147
5148 /* If any reference in the chain is volatile, the effect is volatile. */
5149 if (TREE_THIS_VOLATILE (exp))
5150 *pvolatilep = 1;
5151
5152 /* If the offset is non-constant already, then we can't assume any
5153 alignment more than the alignment here. */
5154 if (! TREE_CONSTANT (offset))
5155 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5156
5157 exp = TREE_OPERAND (exp, 0);
5158 }
5159
5160 if (DECL_P (exp))
5161 alignment = MIN (alignment, DECL_ALIGN (exp));
5162 else if (TREE_TYPE (exp) != 0)
5163 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5164
5165 /* If OFFSET is constant, see if we can return the whole thing as a
5166 constant bit position. Otherwise, split it up. */
5167 if (host_integerp (offset, 0)
5168 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5169 bitsize_unit_node))
5170 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5171 && host_integerp (tem, 0))
5172 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5173 else
5174 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5175
5176 *pmode = mode;
5177 *palignment = alignment;
5178 return exp;
5179 }
5180
5181 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5182
5183 static enum memory_use_mode
5184 get_memory_usage_from_modifier (modifier)
5185 enum expand_modifier modifier;
5186 {
5187 switch (modifier)
5188 {
5189 case EXPAND_NORMAL:
5190 case EXPAND_SUM:
5191 return MEMORY_USE_RO;
5192 break;
5193 case EXPAND_MEMORY_USE_WO:
5194 return MEMORY_USE_WO;
5195 break;
5196 case EXPAND_MEMORY_USE_RW:
5197 return MEMORY_USE_RW;
5198 break;
5199 case EXPAND_MEMORY_USE_DONT:
5200 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5201 MEMORY_USE_DONT, because they are modifiers to a call of
5202 expand_expr in the ADDR_EXPR case of expand_expr. */
5203 case EXPAND_CONST_ADDRESS:
5204 case EXPAND_INITIALIZER:
5205 return MEMORY_USE_DONT;
5206 case EXPAND_MEMORY_USE_BAD:
5207 default:
5208 abort ();
5209 }
5210 }
5211 \f
5212 /* Given an rtx VALUE that may contain additions and multiplications,
5213 return an equivalent value that just refers to a register or memory.
5214 This is done by generating instructions to perform the arithmetic
5215 and returning a pseudo-register containing the value.
5216
5217 The returned value may be a REG, SUBREG, MEM or constant. */
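/* For example, given
     (plus:SI (reg:SI 100) (mult:SI (reg:SI 101) (const_int 4)))
   this emits the multiplication (or an equivalent shift) and the
   addition, and returns a pseudo register holding the sum.  */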
5218
5219 rtx
5220 force_operand (value, target)
5221 rtx value, target;
5222 {
5223 register optab binoptab = 0;
5224 /* Use a temporary to force order of execution of calls to
5225 `force_operand'. */
5226 rtx tmp;
5227 register rtx op2;
5228 /* Use subtarget as the target for operand 0 of a binary operation. */
5229 register rtx subtarget = get_subtarget (target);
5230
5231 /* Check for a PIC address load. */
5232 if (flag_pic
5233 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5234 && XEXP (value, 0) == pic_offset_table_rtx
5235 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5236 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5237 || GET_CODE (XEXP (value, 1)) == CONST))
5238 {
5239 if (!subtarget)
5240 subtarget = gen_reg_rtx (GET_MODE (value));
5241 emit_move_insn (subtarget, value);
5242 return subtarget;
5243 }
5244
5245 if (GET_CODE (value) == PLUS)
5246 binoptab = add_optab;
5247 else if (GET_CODE (value) == MINUS)
5248 binoptab = sub_optab;
5249 else if (GET_CODE (value) == MULT)
5250 {
5251 op2 = XEXP (value, 1);
5252 if (!CONSTANT_P (op2)
5253 && !(GET_CODE (op2) == REG && op2 != subtarget))
5254 subtarget = 0;
5255 tmp = force_operand (XEXP (value, 0), subtarget);
5256 return expand_mult (GET_MODE (value), tmp,
5257 force_operand (op2, NULL_RTX),
5258 target, 0);
5259 }
5260
5261 if (binoptab)
5262 {
5263 op2 = XEXP (value, 1);
5264 if (!CONSTANT_P (op2)
5265 && !(GET_CODE (op2) == REG && op2 != subtarget))
5266 subtarget = 0;
5267 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5268 {
5269 binoptab = add_optab;
5270 op2 = negate_rtx (GET_MODE (value), op2);
5271 }
5272
5273 /* Check for an addition with OP2 a constant integer and our first
5274 operand a PLUS of a virtual register and something else. In that
5275 case, we want to emit the sum of the virtual register and the
5276 constant first and then add the other value. This allows virtual
5277 register instantiation to simply modify the constant rather than
5278 creating another one around this addition. */
5279 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5280 && GET_CODE (XEXP (value, 0)) == PLUS
5281 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5282 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5283 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5284 {
5285 rtx temp = expand_binop (GET_MODE (value), binoptab,
5286 XEXP (XEXP (value, 0), 0), op2,
5287 subtarget, 0, OPTAB_LIB_WIDEN);
5288 return expand_binop (GET_MODE (value), binoptab, temp,
5289 force_operand (XEXP (XEXP (value, 0), 1), 0),
5290 target, 0, OPTAB_LIB_WIDEN);
5291 }
5292
5293 tmp = force_operand (XEXP (value, 0), subtarget);
5294 return expand_binop (GET_MODE (value), binoptab, tmp,
5295 force_operand (op2, NULL_RTX),
5296 target, 0, OPTAB_LIB_WIDEN);
5297 /* We give UNSIGNEDP = 0 to expand_binop
5298 because the only operations we are expanding here are signed ones. */
5299 }
5300 return value;
5301 }
5302 \f
5303 /* Subroutine of expand_expr:
5304 save the non-copied parts (LIST) of an expr (LHS), and return a list
5305 which can restore these values to their previous values,
5306 should something modify their storage. */
5307
5308 static tree
5309 save_noncopied_parts (lhs, list)
5310 tree lhs;
5311 tree list;
5312 {
5313 tree tail;
5314 tree parts = 0;
5315
5316 for (tail = list; tail; tail = TREE_CHAIN (tail))
5317 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5318 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5319 else
5320 {
5321 tree part = TREE_VALUE (tail);
5322 tree part_type = TREE_TYPE (part);
5323 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5324 rtx target = assign_temp (part_type, 0, 1, 1);
5325 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5326 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5327 parts = tree_cons (to_be_saved,
5328 build (RTL_EXPR, part_type, NULL_TREE,
5329 (tree) target),
5330 parts);
5331 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5332 }
5333 return parts;
5334 }
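/* Illustrative note (added, hedged): for each leaf PART the loop above
   builds a COMPONENT_REF of LHS, copies its current contents into a fresh
   temporary via store_expr, and records the pair as a TREE_LIST whose
   TREE_PURPOSE is the COMPONENT_REF and whose TREE_VALUE is an RTL_EXPR
   wrapping that temporary, so the saved values can later be stored back
   through those COMPONENT_REFs.  */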
5335
5336 /* Subroutine of expand_expr:
5337 record the non-copied parts (LIST) of an expr (LHS), and return a list
5338 which specifies the initial values of these parts. */
5339
5340 static tree
5341 init_noncopied_parts (lhs, list)
5342 tree lhs;
5343 tree list;
5344 {
5345 tree tail;
5346 tree parts = 0;
5347
5348 for (tail = list; tail; tail = TREE_CHAIN (tail))
5349 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5350 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5351 else if (TREE_PURPOSE (tail))
5352 {
5353 tree part = TREE_VALUE (tail);
5354 tree part_type = TREE_TYPE (part);
5355 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5356 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5357 }
5358 return parts;
5359 }
5360
5361 /* Subroutine of expand_expr: return nonzero iff there is no way that
5362 EXP can reference X, which is being modified. TOP_P is nonzero if this
5363 call is going to be used to determine whether we need a temporary
5364 for EXP, as opposed to a recursive call to this function.
5365
5366 It is always safe for this routine to return zero since it merely
5367 searches for optimization opportunities. */
5368
5369 static int
5370 safe_from_p (x, exp, top_p)
5371 rtx x;
5372 tree exp;
5373 int top_p;
5374 {
5375 rtx exp_rtl = 0;
5376 int i, nops;
5377 static int save_expr_count;
5378 static int save_expr_size = 0;
5379 static tree *save_expr_rewritten;
5380 static tree save_expr_trees[256];
5381
5382 if (x == 0
5383 /* If EXP has varying size, we MUST use a target since we currently
5384 have no way of allocating temporaries of variable size
5385 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5386 So we assume here that something at a higher level has prevented a
5387 clash. This is somewhat bogus, but the best we can do. Only
5388 do this when X is BLKmode and when we are at the top level. */
5389 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5390 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5391 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5392 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5393 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5394 != INTEGER_CST)
5395 && GET_MODE (x) == BLKmode))
5396 return 1;
5397
5398 if (top_p && save_expr_size == 0)
5399 {
5400 int rtn;
5401
5402 save_expr_count = 0;
5403 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5404 save_expr_rewritten = &save_expr_trees[0];
5405
5406 rtn = safe_from_p (x, exp, 1);
5407
5408 for (i = 0; i < save_expr_count; ++i)
5409 {
5410 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5411 abort ();
5412 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5413 }
5414
5415 save_expr_size = 0;
5416
5417 return rtn;
5418 }
5419
5420 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5421 find the underlying pseudo. */
5422 if (GET_CODE (x) == SUBREG)
5423 {
5424 x = SUBREG_REG (x);
5425 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5426 return 0;
5427 }
5428
5429 /* If X is a location in the outgoing argument area, it is always safe. */
5430 if (GET_CODE (x) == MEM
5431 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5432 || (GET_CODE (XEXP (x, 0)) == PLUS
5433 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5434 return 1;
5435
5436 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5437 {
5438 case 'd':
5439 exp_rtl = DECL_RTL (exp);
5440 break;
5441
5442 case 'c':
5443 return 1;
5444
5445 case 'x':
5446 if (TREE_CODE (exp) == TREE_LIST)
5447 return ((TREE_VALUE (exp) == 0
5448 || safe_from_p (x, TREE_VALUE (exp), 0))
5449 && (TREE_CHAIN (exp) == 0
5450 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5451 else if (TREE_CODE (exp) == ERROR_MARK)
5452 return 1; /* An already-visited SAVE_EXPR? */
5453 else
5454 return 0;
5455
5456 case '1':
5457 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5458
5459 case '2':
5460 case '<':
5461 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5462 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5463
5464 case 'e':
5465 case 'r':
5466 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5467 the expression. If it is set, we conflict iff we are that rtx or
5468 both are in memory. Otherwise, we check all operands of the
5469 expression recursively. */
5470
5471 switch (TREE_CODE (exp))
5472 {
5473 case ADDR_EXPR:
5474 return (staticp (TREE_OPERAND (exp, 0))
5475 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5476 || TREE_STATIC (exp));
5477
5478 case INDIRECT_REF:
5479 if (GET_CODE (x) == MEM)
5480 return 0;
5481 break;
5482
5483 case CALL_EXPR:
5484 exp_rtl = CALL_EXPR_RTL (exp);
5485 if (exp_rtl == 0)
5486 {
5487 /* Assume that the call will clobber all hard registers and
5488 all of memory. */
5489 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5490 || GET_CODE (x) == MEM)
5491 return 0;
5492 }
5493
5494 break;
5495
5496 case RTL_EXPR:
5497 /* If a sequence exists, we would have to scan every instruction
5498 in the sequence to see if it was safe. This is probably not
5499 worthwhile. */
5500 if (RTL_EXPR_SEQUENCE (exp))
5501 return 0;
5502
5503 exp_rtl = RTL_EXPR_RTL (exp);
5504 break;
5505
5506 case WITH_CLEANUP_EXPR:
5507 exp_rtl = RTL_EXPR_RTL (exp);
5508 break;
5509
5510 case CLEANUP_POINT_EXPR:
5511 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5512
5513 case SAVE_EXPR:
5514 exp_rtl = SAVE_EXPR_RTL (exp);
5515 if (exp_rtl)
5516 break;
5517
5518 /* This SAVE_EXPR might appear many times in the top-level
5519 safe_from_p() expression, and if it has a complex
5520 subexpression, examining it multiple times could result
5521 in a combinatorial explosion. E.g. on an Alpha
5522 running at least 200MHz, a Fortran test case compiled with
5523 optimization took about 28 minutes to compile -- even though
5524 it was only a few lines long, and the complicated line causing
5525 so much time to be spent in the earlier version of safe_from_p()
5526 had only 293 or so unique nodes.
5527
5528 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5529 where it is so we can turn it back in the top-level safe_from_p()
5530 when we're done. */
5531
5532 /* For now, don't bother re-sizing the array. */
5533 if (save_expr_count >= save_expr_size)
5534 return 0;
5535 save_expr_rewritten[save_expr_count++] = exp;
5536
5537 nops = TREE_CODE_LENGTH (SAVE_EXPR);
5538 for (i = 0; i < nops; i++)
5539 {
5540 tree operand = TREE_OPERAND (exp, i);
5541 if (operand == NULL_TREE)
5542 continue;
5543 TREE_SET_CODE (exp, ERROR_MARK);
5544 if (!safe_from_p (x, operand, 0))
5545 return 0;
5546 TREE_SET_CODE (exp, SAVE_EXPR);
5547 }
5548 TREE_SET_CODE (exp, ERROR_MARK);
5549 return 1;
5550
5551 case BIND_EXPR:
5552 /* The only operand we look at is operand 1. The rest aren't
5553 part of the expression. */
5554 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5555
5556 case METHOD_CALL_EXPR:
5557 /* This takes an rtx argument, but shouldn't appear here. */
5558 abort ();
5559
5560 default:
5561 break;
5562 }
5563
5564 /* If we have an rtx, we do not need to scan our operands. */
5565 if (exp_rtl)
5566 break;
5567
5568 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5569 for (i = 0; i < nops; i++)
5570 if (TREE_OPERAND (exp, i) != 0
5571 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5572 return 0;
5573 }
5574
5575 /* If we have an rtl, find any enclosed object. Then see if we conflict
5576 with it. */
5577 if (exp_rtl)
5578 {
5579 if (GET_CODE (exp_rtl) == SUBREG)
5580 {
5581 exp_rtl = SUBREG_REG (exp_rtl);
5582 if (GET_CODE (exp_rtl) == REG
5583 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5584 return 0;
5585 }
5586
5587 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5588 are memory and EXP is not readonly. */
5589 return ! (rtx_equal_p (x, exp_rtl)
5590 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5591 && ! TREE_READONLY (exp)));
5592 }
5593
5594 /* If we reach here, it is safe. */
5595 return 1;
5596 }
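/* Illustrative example, with hypothetical rtl: if X is (reg:SI 65) and EXP
   is a VAR_DECL whose DECL_RTL is that same pseudo, the 'd' case above sets
   EXP_RTL to it and rtx_equal_p makes the function return 0 (not safe);
   if DECL_RTL is some unrelated pseudo, the final test returns 1.  */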
5597
5598 /* Subroutine of expand_expr: return nonzero iff EXP is an
5599 expression whose type is statically determinable. */
5600
5601 static int
5602 fixed_type_p (exp)
5603 tree exp;
5604 {
5605 if (TREE_CODE (exp) == PARM_DECL
5606 || TREE_CODE (exp) == VAR_DECL
5607 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5608 || TREE_CODE (exp) == COMPONENT_REF
5609 || TREE_CODE (exp) == ARRAY_REF)
5610 return 1;
5611 return 0;
5612 }
5613
5614 /* Subroutine of expand_expr: return rtx if EXP is a
5615 variable or parameter; else return 0. */
5616
5617 static rtx
5618 var_rtx (exp)
5619 tree exp;
5620 {
5621 STRIP_NOPS (exp);
5622 switch (TREE_CODE (exp))
5623 {
5624 case PARM_DECL:
5625 case VAR_DECL:
5626 return DECL_RTL (exp);
5627 default:
5628 return 0;
5629 }
5630 }
5631
5632 #ifdef MAX_INTEGER_COMPUTATION_MODE
5633 void
5634 check_max_integer_computation_mode (exp)
5635 tree exp;
5636 {
5637 enum tree_code code;
5638 enum machine_mode mode;
5639
5640 /* Strip any NOPs that don't change the mode. */
5641 STRIP_NOPS (exp);
5642 code = TREE_CODE (exp);
5643
5644 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5645 if (code == NOP_EXPR
5646 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5647 return;
5648
5649 /* First check the type of the overall operation. We need only look at
5650 unary, binary and relational operations. */
5651 if (TREE_CODE_CLASS (code) == '1'
5652 || TREE_CODE_CLASS (code) == '2'
5653 || TREE_CODE_CLASS (code) == '<')
5654 {
5655 mode = TYPE_MODE (TREE_TYPE (exp));
5656 if (GET_MODE_CLASS (mode) == MODE_INT
5657 && mode > MAX_INTEGER_COMPUTATION_MODE)
5658 fatal ("unsupported wide integer operation");
5659 }
5660
5661 /* Check operand of a unary op. */
5662 if (TREE_CODE_CLASS (code) == '1')
5663 {
5664 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5665 if (GET_MODE_CLASS (mode) == MODE_INT
5666 && mode > MAX_INTEGER_COMPUTATION_MODE)
5667 fatal ("unsupported wide integer operation");
5668 }
5669
5670 /* Check operands of a binary/comparison op. */
5671 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5672 {
5673 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5674 if (GET_MODE_CLASS (mode) == MODE_INT
5675 && mode > MAX_INTEGER_COMPUTATION_MODE)
5676 fatal ("unsupported wide integer operation");
5677
5678 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5679 if (GET_MODE_CLASS (mode) == MODE_INT
5680 && mode > MAX_INTEGER_COMPUTATION_MODE)
5681 fatal ("unsupported wide integer operation");
5682 }
5683 }
5684 #endif
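/* Illustrative example (hypothetical configuration): if a target defined
   MAX_INTEGER_COMPUTATION_MODE as SImode, a PLUS_EXPR whose type has DImode
   would fail the mode > MAX_INTEGER_COMPUTATION_MODE test above and reach
   fatal ("unsupported wide integer operation").  */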
5685 \f
5686 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5687 has any readonly fields. If any of the fields have types that
5688 contain readonly fields, return true as well. */
5689
5690 static int
5691 readonly_fields_p (type)
5692 tree type;
5693 {
5694 tree field;
5695
5696 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5697 if (TREE_CODE (field) == FIELD_DECL
5698 && (TREE_READONLY (field)
5699 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5700 && readonly_fields_p (TREE_TYPE (field)))))
5701 return 1;
5702
5703 return 0;
5704 }
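/* Illustrative example (added): for a type like
     struct s { const int a; int b; };
   the FIELD_DECL for `a' is TREE_READONLY, so readonly_fields_p returns 1;
   the recursion also catches a field whose own RECORD_TYPE contains such a
   member.  */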
5705 \f
5706 /* expand_expr: generate code for computing expression EXP.
5707 An rtx for the computed value is returned. The value is never null.
5708 In the case of a void EXP, const0_rtx is returned.
5709
5710 The value may be stored in TARGET if TARGET is nonzero.
5711 TARGET is just a suggestion; callers must assume that
5712 the rtx returned may not be the same as TARGET.
5713
5714 If TARGET is CONST0_RTX, it means that the value will be ignored.
5715
5716 If TMODE is not VOIDmode, it suggests generating the
5717 result in mode TMODE. But this is done only when convenient.
5718 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5719 TMODE is just a suggestion; callers must assume that
5720 the rtx returned may not have mode TMODE.
5721
5722 Note that TARGET may have neither TMODE nor MODE. In that case, it
5723 probably will not be used.
5724
5725 If MODIFIER is EXPAND_SUM then when EXP is an addition
5726 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5727 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5728 products as above, or REG or MEM, or constant.
5729 Ordinarily in such cases we would output mul or add instructions
5730 and then return a pseudo reg containing the sum.
5731
5732 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5733 it also marks a label as absolutely required (it can't be dead).
5734 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5735 This is used for outputting expressions used in initializers.
5736
5737 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5738 with a constant address even if that address is not normally legitimate.
5739 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
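/* Illustrative addendum (not in the original comment; register numbers are
   hypothetical): with MODIFIER == EXPAND_SUM, expanding an address
   computation such as `a[i]' may simply hand back something like

     (plus:SI (reg:SI 60) (mult:SI (reg:SI 61) (const_int 4)))

   instead of emitting the add and multiply insns; a caller that cannot use
   such a form is expected to pass the result through force_operand.  */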
5740
5741 rtx
5742 expand_expr (exp, target, tmode, modifier)
5743 register tree exp;
5744 rtx target;
5745 enum machine_mode tmode;
5746 enum expand_modifier modifier;
5747 {
5748 register rtx op0, op1, temp;
5749 tree type = TREE_TYPE (exp);
5750 int unsignedp = TREE_UNSIGNED (type);
5751 register enum machine_mode mode;
5752 register enum tree_code code = TREE_CODE (exp);
5753 optab this_optab;
5754 rtx subtarget, original_target;
5755 int ignore;
5756 tree context;
5757 /* Used by check-memory-usage to make the modifier read-only. */
5758 enum expand_modifier ro_modifier;
5759
5760 /* Handle ERROR_MARK before anybody tries to access its type. */
5761 if (TREE_CODE (exp) == ERROR_MARK)
5762 {
5763 op0 = CONST0_RTX (tmode);
5764 if (op0 != 0)
5765 return op0;
5766 return const0_rtx;
5767 }
5768
5769 mode = TYPE_MODE (type);
5770 /* Use subtarget as the target for operand 0 of a binary operation. */
5771 subtarget = get_subtarget (target);
5772 original_target = target;
5773 ignore = (target == const0_rtx
5774 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5775 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5776 || code == COND_EXPR)
5777 && TREE_CODE (type) == VOID_TYPE));
5778
5779 /* Make a read-only version of the modifier. */
5780 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5781 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5782 ro_modifier = modifier;
5783 else
5784 ro_modifier = EXPAND_NORMAL;
5785
5786 /* If we are going to ignore this result, we need only do something
5787 if there is a side-effect somewhere in the expression. If there
5788 is, short-circuit the most common cases here. Note that we must
5789 not call expand_expr with anything but const0_rtx in case this
5790 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5791
5792 if (ignore)
5793 {
5794 if (! TREE_SIDE_EFFECTS (exp))
5795 return const0_rtx;
5796
5797 /* Ensure we reference a volatile object even if value is ignored, but
5798 don't do this if all we are doing is taking its address. */
5799 if (TREE_THIS_VOLATILE (exp)
5800 && TREE_CODE (exp) != FUNCTION_DECL
5801 && mode != VOIDmode && mode != BLKmode
5802 && modifier != EXPAND_CONST_ADDRESS)
5803 {
5804 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5805 if (GET_CODE (temp) == MEM)
5806 temp = copy_to_reg (temp);
5807 return const0_rtx;
5808 }
5809
5810 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5811 || code == INDIRECT_REF || code == BUFFER_REF)
5812 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5813 VOIDmode, ro_modifier);
5814 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5815 || code == ARRAY_REF)
5816 {
5817 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5818 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5819 return const0_rtx;
5820 }
5821 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5822 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5823 /* If the second operand has no side effects, just evaluate
5824 the first. */
5825 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5826 VOIDmode, ro_modifier);
5827 else if (code == BIT_FIELD_REF)
5828 {
5829 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5830 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5831 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5832 return const0_rtx;
5833 }
5834 ;
5835 target = 0;
5836 }
5837
5838 #ifdef MAX_INTEGER_COMPUTATION_MODE
5839 /* Only check stuff here if the mode we want is different from the mode
5840 of the expression; if it's the same, check_max_integer_computation_mode
5841 will handle it. Do we really need to check this stuff at all? */
5842
5843 if (target
5844 && GET_MODE (target) != mode
5845 && TREE_CODE (exp) != INTEGER_CST
5846 && TREE_CODE (exp) != PARM_DECL
5847 && TREE_CODE (exp) != ARRAY_REF
5848 && TREE_CODE (exp) != COMPONENT_REF
5849 && TREE_CODE (exp) != BIT_FIELD_REF
5850 && TREE_CODE (exp) != INDIRECT_REF
5851 && TREE_CODE (exp) != CALL_EXPR
5852 && TREE_CODE (exp) != VAR_DECL
5853 && TREE_CODE (exp) != RTL_EXPR)
5854 {
5855 enum machine_mode mode = GET_MODE (target);
5856
5857 if (GET_MODE_CLASS (mode) == MODE_INT
5858 && mode > MAX_INTEGER_COMPUTATION_MODE)
5859 fatal ("unsupported wide integer operation");
5860 }
5861
5862 if (tmode != mode
5863 && TREE_CODE (exp) != INTEGER_CST
5864 && TREE_CODE (exp) != PARM_DECL
5865 && TREE_CODE (exp) != ARRAY_REF
5866 && TREE_CODE (exp) != COMPONENT_REF
5867 && TREE_CODE (exp) != BIT_FIELD_REF
5868 && TREE_CODE (exp) != INDIRECT_REF
5869 && TREE_CODE (exp) != VAR_DECL
5870 && TREE_CODE (exp) != CALL_EXPR
5871 && TREE_CODE (exp) != RTL_EXPR
5872 && GET_MODE_CLASS (tmode) == MODE_INT
5873 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5874 fatal ("unsupported wide integer operation");
5875
5876 check_max_integer_computation_mode (exp);
5877 #endif
5878
5879 /* If we will do cse, generate all results into pseudo registers
5880 since 1) that allows cse to find more things
5881 and 2) otherwise cse could produce an insn the machine
5882 cannot support. */
5883
5884 if (! cse_not_expected && mode != BLKmode && target
5885 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5886 target = subtarget;
5887
5888 switch (code)
5889 {
5890 case LABEL_DECL:
5891 {
5892 tree function = decl_function_context (exp);
5893 /* Handle using a label in a containing function. */
5894 if (function != current_function_decl
5895 && function != inline_function_decl && function != 0)
5896 {
5897 struct function *p = find_function_data (function);
5898 /* Allocate in the memory associated with the function
5899 that the label is in. */
5900 push_obstacks (p->function_obstack,
5901 p->function_maybepermanent_obstack);
5902
5903 p->expr->x_forced_labels
5904 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5905 p->expr->x_forced_labels);
5906 pop_obstacks ();
5907 }
5908 else
5909 {
5910 if (modifier == EXPAND_INITIALIZER)
5911 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5912 label_rtx (exp),
5913 forced_labels);
5914 }
5915
5916 temp = gen_rtx_MEM (FUNCTION_MODE,
5917 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5918 if (function != current_function_decl
5919 && function != inline_function_decl && function != 0)
5920 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5921 return temp;
5922 }
5923
5924 case PARM_DECL:
5925 if (DECL_RTL (exp) == 0)
5926 {
5927 error_with_decl (exp, "prior parameter's size depends on `%s'");
5928 return CONST0_RTX (mode);
5929 }
5930
5931 /* ... fall through ... */
5932
5933 case VAR_DECL:
5934 /* If a static var's type was incomplete when the decl was written,
5935 but the type is complete now, lay out the decl now. */
5936 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5937 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5938 {
5939 push_obstacks_nochange ();
5940 end_temporary_allocation ();
5941 layout_decl (exp, 0);
5942 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5943 pop_obstacks ();
5944 }
5945
5946 /* Although static-storage variables start off initialized, according to
5947 ANSI C, a memcpy could overwrite them with uninitialized values. So
5948 we check them too. This also lets us check for read-only variables
5949 accessed via a non-const declaration, in case it won't be detected
5950 any other way (e.g., in an embedded system or OS kernel without
5951 memory protection).
5952
5953 Aggregates are not checked here; they're handled elsewhere. */
5954 if (cfun && current_function_check_memory_usage
5955 && code == VAR_DECL
5956 && GET_CODE (DECL_RTL (exp)) == MEM
5957 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5958 {
5959 enum memory_use_mode memory_usage;
5960 memory_usage = get_memory_usage_from_modifier (modifier);
5961
5962 in_check_memory_usage = 1;
5963 if (memory_usage != MEMORY_USE_DONT)
5964 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5965 XEXP (DECL_RTL (exp), 0), Pmode,
5966 GEN_INT (int_size_in_bytes (type)),
5967 TYPE_MODE (sizetype),
5968 GEN_INT (memory_usage),
5969 TYPE_MODE (integer_type_node));
5970 in_check_memory_usage = 0;
5971 }
5972
5973 /* ... fall through ... */
5974
5975 case FUNCTION_DECL:
5976 case RESULT_DECL:
5977 if (DECL_RTL (exp) == 0)
5978 abort ();
5979
5980 /* Ensure the variable is marked as used even if it doesn't go through
5981 a parser. If it hasn't been used yet, write out an external
5982 definition. */
5983 if (! TREE_USED (exp))
5984 {
5985 assemble_external (exp);
5986 TREE_USED (exp) = 1;
5987 }
5988
5989 /* Show we haven't gotten RTL for this yet. */
5990 temp = 0;
5991
5992 /* Handle variables inherited from containing functions. */
5993 context = decl_function_context (exp);
5994
5995 /* We treat inline_function_decl as an alias for the current function
5996 because that is the inline function whose vars, types, etc.
5997 are being merged into the current function.
5998 See expand_inline_function. */
5999
6000 if (context != 0 && context != current_function_decl
6001 && context != inline_function_decl
6002 /* If var is static, we don't need a static chain to access it. */
6003 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6004 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6005 {
6006 rtx addr;
6007
6008 /* Mark as non-local and addressable. */
6009 DECL_NONLOCAL (exp) = 1;
6010 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6011 abort ();
6012 mark_addressable (exp);
6013 if (GET_CODE (DECL_RTL (exp)) != MEM)
6014 abort ();
6015 addr = XEXP (DECL_RTL (exp), 0);
6016 if (GET_CODE (addr) == MEM)
6017 addr = change_address (addr, Pmode,
6018 fix_lexical_addr (XEXP (addr, 0), exp));
6019 else
6020 addr = fix_lexical_addr (addr, exp);
6021
6022 temp = change_address (DECL_RTL (exp), mode, addr);
6023 }
6024
6025 /* This is the case of an array whose size is to be determined
6026 from its initializer, while the initializer is still being parsed.
6027 See expand_decl. */
6028
6029 else if (GET_CODE (DECL_RTL (exp)) == MEM
6030 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6031 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6032 XEXP (DECL_RTL (exp), 0));
6033
6034 /* If DECL_RTL is memory, we are in the normal case and either
6035 the address is not valid or it is not a register and -fforce-addr
6036 is specified, get the address into a register. */
6037
6038 else if (GET_CODE (DECL_RTL (exp)) == MEM
6039 && modifier != EXPAND_CONST_ADDRESS
6040 && modifier != EXPAND_SUM
6041 && modifier != EXPAND_INITIALIZER
6042 && (! memory_address_p (DECL_MODE (exp),
6043 XEXP (DECL_RTL (exp), 0))
6044 || (flag_force_addr
6045 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6046 temp = change_address (DECL_RTL (exp), VOIDmode,
6047 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6048
6049 /* If we got something, return it. But first, set the alignment
6050 if the address is a register. */
6051 if (temp != 0)
6052 {
6053 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6054 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6055
6056 return temp;
6057 }
6058
6059 /* If the mode of DECL_RTL does not match that of the decl, it
6060 must be a promoted value. We return a SUBREG of the wanted mode,
6061 but mark it so that we know that it was already extended. */
6062
6063 if (GET_CODE (DECL_RTL (exp)) == REG
6064 && GET_MODE (DECL_RTL (exp)) != mode)
6065 {
6066 /* Get the signedness used for this variable. Ensure we get the
6067 same mode we got when the variable was declared. */
6068 if (GET_MODE (DECL_RTL (exp))
6069 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6070 abort ();
6071
6072 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6073 SUBREG_PROMOTED_VAR_P (temp) = 1;
6074 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6075 return temp;
6076 }
6077
6078 return DECL_RTL (exp);
6079
6080 case INTEGER_CST:
6081 return immed_double_const (TREE_INT_CST_LOW (exp),
6082 TREE_INT_CST_HIGH (exp), mode);
6083
6084 case CONST_DECL:
6085 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6086 EXPAND_MEMORY_USE_BAD);
6087
6088 case REAL_CST:
6089 /* If optimized, generate immediate CONST_DOUBLE
6090 which will be turned into memory by reload if necessary.
6091
6092 We used to force a register so that loop.c could see it. But
6093 this does not allow gen_* patterns to perform optimizations with
6094 the constants. It also produces two insns in cases like "x = 1.0;".
6095 On most machines, floating-point constants are not permitted in
6096 many insns, so we'd end up copying it to a register in any case.
6097
6098 Now, we do the copying in expand_binop, if appropriate. */
6099 return immed_real_const (exp);
6100
6101 case COMPLEX_CST:
6102 case STRING_CST:
6103 if (! TREE_CST_RTL (exp))
6104 output_constant_def (exp);
6105
6106 /* TREE_CST_RTL probably contains a constant address.
6107 On RISC machines where a constant address isn't valid,
6108 make some insns to get that address into a register. */
6109 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6110 && modifier != EXPAND_CONST_ADDRESS
6111 && modifier != EXPAND_INITIALIZER
6112 && modifier != EXPAND_SUM
6113 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6114 || (flag_force_addr
6115 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6116 return change_address (TREE_CST_RTL (exp), VOIDmode,
6117 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6118 return TREE_CST_RTL (exp);
6119
6120 case EXPR_WITH_FILE_LOCATION:
6121 {
6122 rtx to_return;
6123 const char *saved_input_filename = input_filename;
6124 int saved_lineno = lineno;
6125 input_filename = EXPR_WFL_FILENAME (exp);
6126 lineno = EXPR_WFL_LINENO (exp);
6127 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6128 emit_line_note (input_filename, lineno);
6129 /* Possibly avoid switching back and forth here. */
6130 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6131 input_filename = saved_input_filename;
6132 lineno = saved_lineno;
6133 return to_return;
6134 }
6135
6136 case SAVE_EXPR:
6137 context = decl_function_context (exp);
6138
6139 /* If this SAVE_EXPR was at global context, assume we are an
6140 initialization function and move it into our context. */
6141 if (context == 0)
6142 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6143
6144 /* We treat inline_function_decl as an alias for the current function
6145 because that is the inline function whose vars, types, etc.
6146 are being merged into the current function.
6147 See expand_inline_function. */
6148 if (context == current_function_decl || context == inline_function_decl)
6149 context = 0;
6150
6151 /* If this is non-local, handle it. */
6152 if (context)
6153 {
6154 /* The following call just exists to abort if the context is
6155 not of a containing function. */
6156 find_function_data (context);
6157
6158 temp = SAVE_EXPR_RTL (exp);
6159 if (temp && GET_CODE (temp) == REG)
6160 {
6161 put_var_into_stack (exp);
6162 temp = SAVE_EXPR_RTL (exp);
6163 }
6164 if (temp == 0 || GET_CODE (temp) != MEM)
6165 abort ();
6166 return change_address (temp, mode,
6167 fix_lexical_addr (XEXP (temp, 0), exp));
6168 }
6169 if (SAVE_EXPR_RTL (exp) == 0)
6170 {
6171 if (mode == VOIDmode)
6172 temp = const0_rtx;
6173 else
6174 temp = assign_temp (type, 3, 0, 0);
6175
6176 SAVE_EXPR_RTL (exp) = temp;
6177 if (!optimize && GET_CODE (temp) == REG)
6178 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6179 save_expr_regs);
6180
6181 /* If the mode of TEMP does not match that of the expression, it
6182 must be a promoted value. We pass store_expr a SUBREG of the
6183 wanted mode but mark it so that we know that it was already
6184 extended. Note that `unsignedp' was modified above in
6185 this case. */
6186
6187 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6188 {
6189 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6190 SUBREG_PROMOTED_VAR_P (temp) = 1;
6191 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6192 }
6193
6194 if (temp == const0_rtx)
6195 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6196 EXPAND_MEMORY_USE_BAD);
6197 else
6198 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6199
6200 TREE_USED (exp) = 1;
6201 }
6202
6203 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6204 must be a promoted value. We return a SUBREG of the wanted mode,
6205 but mark it so that we know that it was already extended. */
6206
6207 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6208 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6209 {
6210 /* Compute the signedness and make the proper SUBREG. */
6211 promote_mode (type, mode, &unsignedp, 0);
6212 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6213 SUBREG_PROMOTED_VAR_P (temp) = 1;
6214 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6215 return temp;
6216 }
6217
6218 return SAVE_EXPR_RTL (exp);
6219
6220 case UNSAVE_EXPR:
6221 {
6222 rtx temp;
6223 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6224 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6225 return temp;
6226 }
6227
6228 case PLACEHOLDER_EXPR:
6229 {
6230 tree placeholder_expr;
6231
6232 /* If there is an object on the head of the placeholder list,
6233 see if any object in it is of type TYPE or a pointer to it. For
6234 further information, see tree.def. */
6235 for (placeholder_expr = placeholder_list;
6236 placeholder_expr != 0;
6237 placeholder_expr = TREE_CHAIN (placeholder_expr))
6238 {
6239 tree need_type = TYPE_MAIN_VARIANT (type);
6240 tree object = 0;
6241 tree old_list = placeholder_list;
6242 tree elt;
6243
6244 /* Find the outermost reference that is of the type we want.
6245 If none, see if any object has a type that is a pointer to
6246 the type we want. */
6247 for (elt = TREE_PURPOSE (placeholder_expr);
6248 elt != 0 && object == 0;
6249 elt
6250 = ((TREE_CODE (elt) == COMPOUND_EXPR
6251 || TREE_CODE (elt) == COND_EXPR)
6252 ? TREE_OPERAND (elt, 1)
6253 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6254 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6255 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6256 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6257 ? TREE_OPERAND (elt, 0) : 0))
6258 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6259 object = elt;
6260
6261 for (elt = TREE_PURPOSE (placeholder_expr);
6262 elt != 0 && object == 0;
6263 elt
6264 = ((TREE_CODE (elt) == COMPOUND_EXPR
6265 || TREE_CODE (elt) == COND_EXPR)
6266 ? TREE_OPERAND (elt, 1)
6267 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6268 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6269 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6270 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6271 ? TREE_OPERAND (elt, 0) : 0))
6272 if (POINTER_TYPE_P (TREE_TYPE (elt))
6273 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6274 == need_type))
6275 object = build1 (INDIRECT_REF, need_type, elt);
6276
6277 if (object != 0)
6278 {
6279 /* Expand this object skipping the list entries before
6280 it was found in case it is also a PLACEHOLDER_EXPR.
6281 In that case, we want to translate it using subsequent
6282 entries. */
6283 placeholder_list = TREE_CHAIN (placeholder_expr);
6284 temp = expand_expr (object, original_target, tmode,
6285 ro_modifier);
6286 placeholder_list = old_list;
6287 return temp;
6288 }
6289 }
6290 }
6291
6292 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6293 abort ();
6294
6295 case WITH_RECORD_EXPR:
6296 /* Put the object on the placeholder list, expand our first operand,
6297 and pop the list. */
6298 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6299 placeholder_list);
6300 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6301 tmode, ro_modifier);
6302 placeholder_list = TREE_CHAIN (placeholder_list);
6303 return target;
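/* Illustrative note (added, hedged): the two cases above cooperate.  A
   WITH_RECORD_EXPR pushes its operand 1 -- typically an object of a
   self-referential record type, as Ada produces for variable-sized
   records -- onto placeholder_list; any PLACEHOLDER_EXPR of that type
   reached while expanding operand 0 is then replaced by that object (or
   by an INDIRECT_REF of a pointer to it) by the PLACEHOLDER_EXPR case.  */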
6304
6305 case GOTO_EXPR:
6306 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6307 expand_goto (TREE_OPERAND (exp, 0));
6308 else
6309 expand_computed_goto (TREE_OPERAND (exp, 0));
6310 return const0_rtx;
6311
6312 case EXIT_EXPR:
6313 expand_exit_loop_if_false (NULL_PTR,
6314 invert_truthvalue (TREE_OPERAND (exp, 0)));
6315 return const0_rtx;
6316
6317 case LABELED_BLOCK_EXPR:
6318 if (LABELED_BLOCK_BODY (exp))
6319 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6320 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6321 return const0_rtx;
6322
6323 case EXIT_BLOCK_EXPR:
6324 if (EXIT_BLOCK_RETURN (exp))
6325 sorry ("returned value in block_exit_expr");
6326 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6327 return const0_rtx;
6328
6329 case LOOP_EXPR:
6330 push_temp_slots ();
6331 expand_start_loop (1);
6332 expand_expr_stmt (TREE_OPERAND (exp, 0));
6333 expand_end_loop ();
6334 pop_temp_slots ();
6335
6336 return const0_rtx;
6337
6338 case BIND_EXPR:
6339 {
6340 tree vars = TREE_OPERAND (exp, 0);
6341 int vars_need_expansion = 0;
6342
6343 /* Need to open a binding contour here because
6344 if there are any cleanups they must be contained here. */
6345 expand_start_bindings (2);
6346
6347 /* Mark the corresponding BLOCK for output in its proper place. */
6348 if (TREE_OPERAND (exp, 2) != 0
6349 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6350 insert_block (TREE_OPERAND (exp, 2));
6351
6352 /* If VARS have not yet been expanded, expand them now. */
6353 while (vars)
6354 {
6355 if (DECL_RTL (vars) == 0)
6356 {
6357 vars_need_expansion = 1;
6358 expand_decl (vars);
6359 }
6360 expand_decl_init (vars);
6361 vars = TREE_CHAIN (vars);
6362 }
6363
6364 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6365
6366 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6367
6368 return temp;
6369 }
6370
6371 case RTL_EXPR:
6372 if (RTL_EXPR_SEQUENCE (exp))
6373 {
6374 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6375 abort ();
6376 emit_insns (RTL_EXPR_SEQUENCE (exp));
6377 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6378 }
6379 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6380 free_temps_for_rtl_expr (exp);
6381 return RTL_EXPR_RTL (exp);
6382
6383 case CONSTRUCTOR:
6384 /* If we don't need the result, just ensure we evaluate any
6385 subexpressions. */
6386 if (ignore)
6387 {
6388 tree elt;
6389 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6390 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6391 EXPAND_MEMORY_USE_BAD);
6392 return const0_rtx;
6393 }
6394
6395 /* All elts simple constants => refer to a constant in memory. But
6396 if this is a non-BLKmode mode, let it store a field at a time
6397 since that should make a CONST_INT or CONST_DOUBLE when we
6398 fold. Likewise, if we have a target we can use, it is best to
6399 store directly into the target unless the type is large enough
6400 that memcpy will be used. If we are making an initializer and
6401 all operands are constant, put it in memory as well. */
6402 else if ((TREE_STATIC (exp)
6403 && ((mode == BLKmode
6404 && ! (target != 0 && safe_from_p (target, exp, 1)))
6405 || TREE_ADDRESSABLE (exp)
6406 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6407 && (! MOVE_BY_PIECES_P
6408 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6409 TYPE_ALIGN (type)))
6410 && ! mostly_zeros_p (exp))))
6411 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6412 {
6413 rtx constructor = output_constant_def (exp);
6414
6415 if (modifier != EXPAND_CONST_ADDRESS
6416 && modifier != EXPAND_INITIALIZER
6417 && modifier != EXPAND_SUM
6418 && (! memory_address_p (GET_MODE (constructor),
6419 XEXP (constructor, 0))
6420 || (flag_force_addr
6421 && GET_CODE (XEXP (constructor, 0)) != REG)))
6422 constructor = change_address (constructor, VOIDmode,
6423 XEXP (constructor, 0));
6424 return constructor;
6425 }
6426
6427 else
6428 {
6429 /* Handle calls that pass values in multiple non-contiguous
6430 locations. The Irix 6 ABI has examples of this. */
6431 if (target == 0 || ! safe_from_p (target, exp, 1)
6432 || GET_CODE (target) == PARALLEL)
6433 {
6434 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6435 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6436 else
6437 target = assign_temp (type, 0, 1, 1);
6438 }
6439
6440 if (TREE_READONLY (exp))
6441 {
6442 if (GET_CODE (target) == MEM)
6443 target = copy_rtx (target);
6444
6445 RTX_UNCHANGING_P (target) = 1;
6446 }
6447
6448 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6449 int_size_in_bytes (TREE_TYPE (exp)));
6450 return target;
6451 }
6452
6453 case INDIRECT_REF:
6454 {
6455 tree exp1 = TREE_OPERAND (exp, 0);
6456 tree index;
6457 tree string = string_constant (exp1, &index);
6458
6459 /* Try to optimize reads from const strings. */
6460 if (string
6461 && TREE_CODE (string) == STRING_CST
6462 && TREE_CODE (index) == INTEGER_CST
6463 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6464 && GET_MODE_CLASS (mode) == MODE_INT
6465 && GET_MODE_SIZE (mode) == 1
6466 && modifier != EXPAND_MEMORY_USE_WO)
6467 return
6468 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6469
6470 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6471 op0 = memory_address (mode, op0);
6472
6473 if (cfun && current_function_check_memory_usage
6474 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6475 {
6476 enum memory_use_mode memory_usage;
6477 memory_usage = get_memory_usage_from_modifier (modifier);
6478
6479 if (memory_usage != MEMORY_USE_DONT)
6480 {
6481 in_check_memory_usage = 1;
6482 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6483 op0, Pmode,
6484 GEN_INT (int_size_in_bytes (type)),
6485 TYPE_MODE (sizetype),
6486 GEN_INT (memory_usage),
6487 TYPE_MODE (integer_type_node));
6488 in_check_memory_usage = 0;
6489 }
6490 }
6491
6492 temp = gen_rtx_MEM (mode, op0);
6493 set_mem_attributes (temp, exp, 0);
6494
6495 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6496 here, because, in C and C++, the fact that a location is accessed
6497 through a pointer to const does not mean that the value there can
6498 never change. Languages where it can never change should
6499 also set TREE_STATIC. */
6500 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6501
6502 /* If we are writing to this object and its type is a record with
6503 readonly fields, we must mark it as readonly so it will
6504 conflict with readonly references to those fields. */
6505 if (modifier == EXPAND_MEMORY_USE_WO
6506 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6507 RTX_UNCHANGING_P (temp) = 1;
6508
6509 return temp;
6510 }
6511
6512 case ARRAY_REF:
6513 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6514 abort ();
6515
6516 {
6517 tree array = TREE_OPERAND (exp, 0);
6518 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6519 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6520 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6521 HOST_WIDE_INT i;
6522
6523 /* Optimize the special-case of a zero lower bound.
6524
6525 We convert the low_bound to sizetype to avoid some problems
6526 with constant folding. (E.g. suppose the lower bound is 1,
6527 and its mode is QI. Without the conversion, (ARRAY
6528 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6529 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6530
6531 if (! integer_zerop (low_bound))
6532 index = size_diffop (index, convert (sizetype, low_bound));
6533
6534 /* Fold an expression like: "foo"[2].
6535 This is not done in fold so it won't happen inside &.
6536 Don't fold if this is for wide characters since it's too
6537 difficult to do correctly and this is a very rare case. */
6538
6539 if (TREE_CODE (array) == STRING_CST
6540 && TREE_CODE (index) == INTEGER_CST
6541 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6542 && GET_MODE_CLASS (mode) == MODE_INT
6543 && GET_MODE_SIZE (mode) == 1)
6544 return
6545 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
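/* Added example: for the "foo"[2] case mentioned above, the element fetched
   is TREE_STRING_POINTER (array)[2], i.e. 'o', so (assuming ASCII) the whole
   reference folds to GEN_INT (111) with no code emitted.  */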
6546
6547 /* If this is a constant index into a constant array,
6548 just get the value from the array. Handle both the cases when
6549 we have an explicit constructor and when our operand is a variable
6550 that was declared const. */
6551
6552 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6553 && TREE_CODE (index) == INTEGER_CST
6554 && 0 > compare_tree_int (index,
6555 list_length (CONSTRUCTOR_ELTS
6556 (TREE_OPERAND (exp, 0)))))
6557 {
6558 tree elem;
6559
6560 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6561 i = TREE_INT_CST_LOW (index);
6562 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6563 ;
6564
6565 if (elem)
6566 return expand_expr (fold (TREE_VALUE (elem)), target,
6567 tmode, ro_modifier);
6568 }
6569
6570 else if (optimize >= 1
6571 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6572 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6573 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6574 {
6575 if (TREE_CODE (index) == INTEGER_CST)
6576 {
6577 tree init = DECL_INITIAL (array);
6578
6579 if (TREE_CODE (init) == CONSTRUCTOR)
6580 {
6581 tree elem;
6582
6583 for (elem = CONSTRUCTOR_ELTS (init);
6584 (elem
6585 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6586 elem = TREE_CHAIN (elem))
6587 ;
6588
6589 if (elem)
6590 return expand_expr (fold (TREE_VALUE (elem)), target,
6591 tmode, ro_modifier);
6592 }
6593 else if (TREE_CODE (init) == STRING_CST
6594 && 0 > compare_tree_int (index,
6595 TREE_STRING_LENGTH (init)))
6596 return (GEN_INT
6597 (TREE_STRING_POINTER
6598 (init)[TREE_INT_CST_LOW (index)]));
6599 }
6600 }
6601 }
6602 /* Fall through. */
6603
6604 case COMPONENT_REF:
6605 case BIT_FIELD_REF:
6606 /* If the operand is a CONSTRUCTOR, we can just extract the
6607 appropriate field if it is present. Don't do this if we have
6608 already written the data since we want to refer to that copy
6609 and varasm.c assumes that's what we'll do. */
6610 if (code != ARRAY_REF
6611 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6612 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6613 {
6614 tree elt;
6615
6616 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6617 elt = TREE_CHAIN (elt))
6618 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6619 /* We can normally use the value of the field in the
6620 CONSTRUCTOR. However, if this is a bitfield in
6621 an integral mode that we can fit in a HOST_WIDE_INT,
6622 we must mask only the number of bits in the bitfield,
6623 since this is done implicitly by the constructor. If
6624 the bitfield does not meet either of those conditions,
6625 we can't do this optimization. */
6626 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6627 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6628 == MODE_INT)
6629 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6630 <= HOST_BITS_PER_WIDE_INT))))
6631 {
6632 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6633 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6634 {
6635 HOST_WIDE_INT bitsize
6636 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6637
6638 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6639 {
6640 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6641 op0 = expand_and (op0, op1, target);
6642 }
6643 else
6644 {
6645 enum machine_mode imode
6646 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6647 tree count
6648 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6649 0);
6650
6651 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6652 target, 0);
6653 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6654 target, 0);
6655 }
6656 }
6657
6658 return op0;
6659 }
6660 }
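/* Illustrative example (added; values are hypothetical): suppose the field
   matched above is an unsigned 3-bit bitfield whose CONSTRUCTOR value
   expands to (const_int -1).  The mask ((HOST_WIDE_INT) 1 << 3) - 1 == 7 is
   ANDed in, yielding 7.  In the signed case, the left/right shift pair
   instead sign-extends from bit 2, yielding -1 -- the same value that
   storing the constructor and reloading the bitfield would produce.  */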
6661
6662 {
6663 enum machine_mode mode1;
6664 HOST_WIDE_INT bitsize, bitpos;
6665 tree offset;
6666 int volatilep = 0;
6667 unsigned int alignment;
6668 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6669 &mode1, &unsignedp, &volatilep,
6670 &alignment);
6671
6672 /* If we got back the original object, something is wrong. Perhaps
6673 we are evaluating an expression too early. In any event, don't
6674 infinitely recurse. */
6675 if (tem == exp)
6676 abort ();
6677
6678 /* If TEM's type is a union of variable size, pass TARGET to the inner
6679 computation, since it will need a temporary and TARGET is known
6680 to have to do. This occurs in unchecked conversion in Ada. */
6681
6682 op0 = expand_expr (tem,
6683 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6684 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6685 != INTEGER_CST)
6686 ? target : NULL_RTX),
6687 VOIDmode,
6688 (modifier == EXPAND_INITIALIZER
6689 || modifier == EXPAND_CONST_ADDRESS)
6690 ? modifier : EXPAND_NORMAL);
6691
6692 /* If this is a constant, put it into a register if it is a
6693 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6694 if (CONSTANT_P (op0))
6695 {
6696 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6697 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6698 && offset == 0)
6699 op0 = force_reg (mode, op0);
6700 else
6701 op0 = validize_mem (force_const_mem (mode, op0));
6702 }
6703
6704 if (offset != 0)
6705 {
6706 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6707
6708 /* If this object is in memory, put it into a register.
6709 This case can't occur in C, but can in Ada if we have
6710 unchecked conversion of an expression from a scalar type to
6711 an array or record type. */
6712 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6713 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6714 {
6715 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6716
6717 mark_temp_addr_taken (memloc);
6718 emit_move_insn (memloc, op0);
6719 op0 = memloc;
6720 }
6721
6722 if (GET_CODE (op0) != MEM)
6723 abort ();
6724
6725 if (GET_MODE (offset_rtx) != ptr_mode)
6726 {
6727 #ifdef POINTERS_EXTEND_UNSIGNED
6728 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6729 #else
6730 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6731 #endif
6732 }
6733
6734 /* A constant address in OP0 can have VOIDmode, we must not try
6735 to call force_reg for that case. Avoid that case. */
6736 if (GET_CODE (op0) == MEM
6737 && GET_MODE (op0) == BLKmode
6738 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6739 && bitsize != 0
6740 && (bitpos % bitsize) == 0
6741 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6742 && alignment == GET_MODE_ALIGNMENT (mode1))
6743 {
6744 rtx temp = change_address (op0, mode1,
6745 plus_constant (XEXP (op0, 0),
6746 (bitpos /
6747 BITS_PER_UNIT)));
6748 if (GET_CODE (XEXP (temp, 0)) == REG)
6749 op0 = temp;
6750 else
6751 op0 = change_address (op0, mode1,
6752 force_reg (GET_MODE (XEXP (temp, 0)),
6753 XEXP (temp, 0)));
6754 bitpos = 0;
6755 }
6756
6757 op0 = change_address (op0, VOIDmode,
6758 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6759 force_reg (ptr_mode,
6760 offset_rtx)));
6761 }
6762
6763 /* Don't forget about volatility even if this is a bitfield. */
6764 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6765 {
6766 op0 = copy_rtx (op0);
6767 MEM_VOLATILE_P (op0) = 1;
6768 }
6769
6770 /* Check the access. */
6771 if (cfun != 0 && current_function_check_memory_usage
6772 && GET_CODE (op0) == MEM)
6773 {
6774 enum memory_use_mode memory_usage;
6775 memory_usage = get_memory_usage_from_modifier (modifier);
6776
6777 if (memory_usage != MEMORY_USE_DONT)
6778 {
6779 rtx to;
6780 int size;
6781
6782 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6783 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6784
6785 /* Check the access right of the pointer. */
6786 in_check_memory_usage = 1;
6787 if (size > BITS_PER_UNIT)
6788 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6789 to, Pmode,
6790 GEN_INT (size / BITS_PER_UNIT),
6791 TYPE_MODE (sizetype),
6792 GEN_INT (memory_usage),
6793 TYPE_MODE (integer_type_node));
6794 in_check_memory_usage = 0;
6795 }
6796 }
6797
6798 /* In cases where an aligned union has an unaligned object
6799 as a field, we might be extracting a BLKmode value from
6800 an integer-mode (e.g., SImode) object. Handle this case
6801 by doing the extract into an object as wide as the field
6802 (which we know to be the width of a basic mode), then
6803 storing into memory, and changing the mode to BLKmode.
6804 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6805 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6806 if (mode1 == VOIDmode
6807 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6808 || (modifier != EXPAND_CONST_ADDRESS
6809 && modifier != EXPAND_INITIALIZER
6810 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6811 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6812 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6813 /* If the field isn't aligned enough to fetch as a memref,
6814 fetch it as a bit field. */
6815 || (mode1 != BLKmode
6816 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6817 && ((TYPE_ALIGN (TREE_TYPE (tem))
6818 < GET_MODE_ALIGNMENT (mode))
6819 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6820 /* If the type and the field are a constant size and the
6821 size of the type isn't the same size as the bitfield,
6822 we must use bitfield operations. */
6823 || ((bitsize >= 0
6824 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6825 == INTEGER_CST)
6826 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6827 bitsize)))))
6828 || (modifier != EXPAND_CONST_ADDRESS
6829 && modifier != EXPAND_INITIALIZER
6830 && mode == BLKmode
6831 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6832 && (TYPE_ALIGN (type) > alignment
6833 || bitpos % TYPE_ALIGN (type) != 0)))
6834 {
6835 enum machine_mode ext_mode = mode;
6836
6837 if (ext_mode == BLKmode
6838 && ! (target != 0 && GET_CODE (op0) == MEM
6839 && GET_CODE (target) == MEM
6840 && bitpos % BITS_PER_UNIT == 0))
6841 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6842
6843 if (ext_mode == BLKmode)
6844 {
6845 /* In this case, BITPOS must start at a byte boundary and
6846 TARGET, if specified, must be a MEM. */
6847 if (GET_CODE (op0) != MEM
6848 || (target != 0 && GET_CODE (target) != MEM)
6849 || bitpos % BITS_PER_UNIT != 0)
6850 abort ();
6851
6852 op0 = change_address (op0, VOIDmode,
6853 plus_constant (XEXP (op0, 0),
6854 bitpos / BITS_PER_UNIT));
6855 if (target == 0)
6856 target = assign_temp (type, 0, 1, 1);
6857
6858 emit_block_move (target, op0,
6859 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6860 / BITS_PER_UNIT),
6861 BITS_PER_UNIT);
6862
6863 return target;
6864 }
6865
6866 op0 = validize_mem (op0);
6867
6868 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6869 mark_reg_pointer (XEXP (op0, 0), alignment);
6870
6871 op0 = extract_bit_field (op0, bitsize, bitpos,
6872 unsignedp, target, ext_mode, ext_mode,
6873 alignment,
6874 int_size_in_bytes (TREE_TYPE (tem)));
6875
6876 /* If the result is a record type and BITSIZE is narrower than
6877 the mode of OP0, an integral mode, and this is a big endian
6878 machine, we must put the field into the high-order bits. */
6879 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6880 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6881 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6882 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6883 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6884 - bitsize),
6885 op0, 1);
6886
6887 if (mode == BLKmode)
6888 {
6889 rtx new = assign_stack_temp (ext_mode,
6890 bitsize / BITS_PER_UNIT, 0);
6891
6892 emit_move_insn (new, op0);
6893 op0 = copy_rtx (new);
6894 PUT_MODE (op0, BLKmode);
6895 MEM_SET_IN_STRUCT_P (op0, 1);
6896 }
6897
6898 return op0;
6899 }
6900
6901 /* If the result is BLKmode, use that to access the object
6902 now as well. */
6903 if (mode == BLKmode)
6904 mode1 = BLKmode;
6905
6906 /* Get a reference to just this component. */
6907 if (modifier == EXPAND_CONST_ADDRESS
6908 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6909 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6910 (bitpos / BITS_PER_UNIT)));
6911 else
6912 op0 = change_address (op0, mode1,
6913 plus_constant (XEXP (op0, 0),
6914 (bitpos / BITS_PER_UNIT)));
6915
6916 set_mem_attributes (op0, exp, 0);
6917 if (GET_CODE (XEXP (op0, 0)) == REG)
6918 mark_reg_pointer (XEXP (op0, 0), alignment);
6919
6920 MEM_VOLATILE_P (op0) |= volatilep;
6921 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6922 || modifier == EXPAND_CONST_ADDRESS
6923 || modifier == EXPAND_INITIALIZER)
6924 return op0;
6925 else if (target == 0)
6926 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6927
6928 convert_move (target, op0, unsignedp);
6929 return target;
6930 }
6931
6932 /* Intended for a reference to a buffer of a file-object in Pascal.
6933 But it's not certain that a special tree code will really be
6934 necessary for these. INDIRECT_REF might work for them. */
6935 case BUFFER_REF:
6936 abort ();
6937
6938 case IN_EXPR:
6939 {
6940 /* Pascal set IN expression.
6941
6942 Algorithm:
6943 rlo = set_low - (set_low%bits_per_word);
6944 the_word = set [ (index - rlo)/bits_per_word ];
6945 bit_index = index % bits_per_word;
6946 bitmask = 1 << bit_index;
6947 return !!(the_word & bitmask); */
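/* Worked example of the pseudo-code above (added; uses 8-bit units purely
   for illustration): with set_low = 3, index = 19 and bits_per_word = 8,
   rlo = 3 - (3 % 8) = 0, the_word = set[(19 - 0) / 8] = set[2],
   bit_index = 19 % 8 = 3, bitmask = 1 << 3 = 8; the result is bit 3 of
   the third byte of the set.  */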
6948
6949 tree set = TREE_OPERAND (exp, 0);
6950 tree index = TREE_OPERAND (exp, 1);
6951 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6952 tree set_type = TREE_TYPE (set);
6953 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6954 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6955 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6956 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6957 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6958 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6959 rtx setaddr = XEXP (setval, 0);
6960 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6961 rtx rlow;
6962 rtx diff, quo, rem, addr, bit, result;
6963
6964 preexpand_calls (exp);
6965
6966 /* If domain is empty, answer is no. Likewise if index is constant
6967 and out of bounds. */
6968 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6969 && TREE_CODE (set_low_bound) == INTEGER_CST
6970 && tree_int_cst_lt (set_high_bound, set_low_bound))
6971 || (TREE_CODE (index) == INTEGER_CST
6972 && TREE_CODE (set_low_bound) == INTEGER_CST
6973 && tree_int_cst_lt (index, set_low_bound))
6974 || (TREE_CODE (set_high_bound) == INTEGER_CST
6975 && TREE_CODE (index) == INTEGER_CST
6976 && tree_int_cst_lt (set_high_bound, index))))
6977 return const0_rtx;
6978
6979 if (target == 0)
6980 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6981
6982 /* If we get here, we have to generate the code for both cases
6983 (in range and out of range). */
6984
6985 op0 = gen_label_rtx ();
6986 op1 = gen_label_rtx ();
6987
6988 if (! (GET_CODE (index_val) == CONST_INT
6989 && GET_CODE (lo_r) == CONST_INT))
6990 {
6991 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6992 GET_MODE (index_val), iunsignedp, 0, op1);
6993 }
6994
6995 if (! (GET_CODE (index_val) == CONST_INT
6996 && GET_CODE (hi_r) == CONST_INT))
6997 {
6998 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6999 GET_MODE (index_val), iunsignedp, 0, op1);
7000 }
7001
7002 /* Calculate the element number of bit zero in the first word
7003 of the set. */
7004 if (GET_CODE (lo_r) == CONST_INT)
7005 rlow = GEN_INT (INTVAL (lo_r)
7006 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7007 else
7008 rlow = expand_binop (index_mode, and_optab, lo_r,
7009 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7010 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7011
7012 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7013 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7014
7015 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7016 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7017 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7018 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7019
7020 addr = memory_address (byte_mode,
7021 expand_binop (index_mode, add_optab, diff,
7022 setaddr, NULL_RTX, iunsignedp,
7023 OPTAB_LIB_WIDEN));
7024
7025 /* Extract the bit we want to examine. */
7026 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7027 gen_rtx_MEM (byte_mode, addr),
7028 make_tree (TREE_TYPE (index), rem),
7029 NULL_RTX, 1);
7030 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7031 GET_MODE (target) == byte_mode ? target : 0,
7032 1, OPTAB_LIB_WIDEN);
7033
7034 if (result != target)
7035 convert_move (target, result, 1);
7036
7037 /* Output the code to handle the out-of-range case. */
7038 emit_jump (op0);
7039 emit_label (op1);
7040 emit_move_insn (target, const0_rtx);
7041 emit_label (op0);
7042 return target;
7043 }
7044
7045 case WITH_CLEANUP_EXPR:
7046 if (RTL_EXPR_RTL (exp) == 0)
7047 {
7048 RTL_EXPR_RTL (exp)
7049 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7050 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7051
7052 /* That's it for this cleanup. */
7053 TREE_OPERAND (exp, 2) = 0;
7054 }
7055 return RTL_EXPR_RTL (exp);
7056
7057 case CLEANUP_POINT_EXPR:
7058 {
7059 /* Start a new binding layer that will keep track of all cleanup
7060 actions to be performed. */
7061 expand_start_bindings (2);
7062
7063 target_temp_slot_level = temp_slot_level;
7064
7065 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7066 /* If we're going to use this value, load it up now. */
7067 if (! ignore)
7068 op0 = force_not_mem (op0);
7069 preserve_temp_slots (op0);
7070 expand_end_bindings (NULL_TREE, 0, 0);
7071 }
7072 return op0;
7073
7074 case CALL_EXPR:
7075 /* Check for a built-in function. */
7076 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7077 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7078 == FUNCTION_DECL)
7079 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7080 return expand_builtin (exp, target, subtarget, tmode, ignore);
7081
7082 /* If this call was expanded already by preexpand_calls,
7083 just return the result we got. */
7084 if (CALL_EXPR_RTL (exp) != 0)
7085 return CALL_EXPR_RTL (exp);
7086
7087 return expand_call (exp, target, ignore);
7088
7089 case NON_LVALUE_EXPR:
7090 case NOP_EXPR:
7091 case CONVERT_EXPR:
7092 case REFERENCE_EXPR:
7093 if (TREE_OPERAND (exp, 0) == error_mark_node)
7094 return const0_rtx;
7095
7096 if (TREE_CODE (type) == UNION_TYPE)
7097 {
7098 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7099
7100 /* If both input and output are BLKmode, this conversion
7101 isn't actually doing anything unless we need to make the
7102 alignment stricter. */
7103 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7104 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7105 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7106 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7107 modifier);
7108
7109 if (target == 0)
7110 {
7111 if (mode != BLKmode)
7112 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7113 else
7114 target = assign_temp (type, 0, 1, 1);
7115 }
7116
7117 if (GET_CODE (target) == MEM)
7118 /* Store data into beginning of memory target. */
7119 store_expr (TREE_OPERAND (exp, 0),
7120 change_address (target, TYPE_MODE (valtype), 0), 0);
7121
7122 else if (GET_CODE (target) == REG)
7123 /* Store this field into a union of the proper type. */
7124 store_field (target,
7125 MIN ((int_size_in_bytes (TREE_TYPE
7126 (TREE_OPERAND (exp, 0)))
7127 * BITS_PER_UNIT),
7128 GET_MODE_BITSIZE (mode)),
7129 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7130 VOIDmode, 0, BITS_PER_UNIT,
7131 int_size_in_bytes (type), 0);
7132 else
7133 abort ();
7134
7135 /* Return the entire union. */
7136 return target;
7137 }
7138
7139 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7140 {
7141 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7142 ro_modifier);
7143
7144 /* If the signedness of the conversion differs and OP0 is
7145 a promoted SUBREG, clear that indication since we now
7146 have to do the proper extension. */
7147 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7148 && GET_CODE (op0) == SUBREG)
7149 SUBREG_PROMOTED_VAR_P (op0) = 0;
7150
7151 return op0;
7152 }
7153
7154 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7155 if (GET_MODE (op0) == mode)
7156 return op0;
7157
7158 /* If OP0 is a constant, just convert it into the proper mode. */
7159 if (CONSTANT_P (op0))
7160 return
7161 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7162 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7163
7164 if (modifier == EXPAND_INITIALIZER)
7165 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7166
7167 if (target == 0)
7168 return
7169 convert_to_mode (mode, op0,
7170 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7171 else
7172 convert_move (target, op0,
7173 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7174 return target;
7175
7176 case PLUS_EXPR:
7177 /* We come here from MINUS_EXPR when the second operand is a
7178 constant. */
7179 plus_expr:
7180 this_optab = add_optab;
7181
7182 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7183 something else, make sure we add the register to the constant and
7184 then to the other thing. This case can occur during strength
7185 reduction and doing it this way will produce better code if the
7186 frame pointer or argument pointer is eliminated.
7187
7188 fold-const.c will ensure that the constant is always in the inner
7189 PLUS_EXPR, so the only case we need to do anything about is if
7190 sp, ap, or fp is our second argument, in which case we must swap
7191 the innermost first argument and our second argument. */
7192
7193 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7194 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7195 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7196 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7197 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7198 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7199 {
7200 tree t = TREE_OPERAND (exp, 1);
7201
7202 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7203 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7204 }
7205
7206 /* If the result is to be ptr_mode and we are adding an integer to
7207 something, we might be forming a constant. So try to use
7208 plus_constant. If it produces a sum and we can't accept it,
7209 use force_operand. This allows P = &ARR[const] to generate
7210 efficient code on machines where a SYMBOL_REF is not a valid
7211 address.
7212
7213 If this is an EXPAND_SUM call, always return the sum. */
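	 /* For instance, &arr[3] for a static array of ints can then be
	    expanded as the symbol for ARR plus a constant byte offset,
	    rather than first loading the symbol into a register.  */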
7214 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7215 || mode == ptr_mode)
7216 {
7217 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7218 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7219 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7220 {
7221 rtx constant_part;
7222
7223 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7224 EXPAND_SUM);
7225 /* Use immed_double_const to ensure that the constant is
7226 truncated according to the mode of OP1, then sign extended
7227 to a HOST_WIDE_INT. Using the constant directly can result
7228 in non-canonical RTL in a 64x32 cross compile. */
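	    /* ("64x32" means a 64-bit HOST_WIDE_INT host compiling for a
	       32-bit target, where the raw low word of the tree constant
	       need not be a properly sign-extended CONST_INT value.)  */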
7229 constant_part
7230 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7231 (HOST_WIDE_INT) 0,
7232 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7233 op1 = plus_constant (op1, INTVAL (constant_part));
7234 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7235 op1 = force_operand (op1, target);
7236 return op1;
7237 }
7238
7239 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7240 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7241 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7242 {
7243 rtx constant_part;
7244
7245 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7246 EXPAND_SUM);
7247 if (! CONSTANT_P (op0))
7248 {
7249 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7250 VOIDmode, modifier);
7251 /* Don't go to both_summands if modifier
7252 says it's not right to return a PLUS. */
7253 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7254 goto binop2;
7255 goto both_summands;
7256 }
7257 /* Use immed_double_const to ensure that the constant is
7258 		 truncated according to the mode of OP0, then sign extended
7259 to a HOST_WIDE_INT. Using the constant directly can result
7260 in non-canonical RTL in a 64x32 cross compile. */
7261 constant_part
7262 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7263 (HOST_WIDE_INT) 0,
7264 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7265 op0 = plus_constant (op0, INTVAL (constant_part));
7266 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7267 op0 = force_operand (op0, target);
7268 return op0;
7269 }
7270 }
7271
7272 /* No sense saving up arithmetic to be done
7273 if it's all in the wrong mode to form part of an address.
7274 And force_operand won't know whether to sign-extend or
7275 zero-extend. */
7276 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7277 || mode != ptr_mode)
7278 goto binop;
7279
7280 preexpand_calls (exp);
7281 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7282 subtarget = 0;
7283
7284 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7285 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7286
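	 /* Either operand may itself already be a PLUS or a MULT, since they
	    were expanded for use in an address-style sum.  The code below
	    folds nested sums so that any MULT ends up first and any constant
	    term ends up last, combining constant terms where possible.  */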
7287 both_summands:
7288 /* Make sure any term that's a sum with a constant comes last. */
7289 if (GET_CODE (op0) == PLUS
7290 && CONSTANT_P (XEXP (op0, 1)))
7291 {
7292 temp = op0;
7293 op0 = op1;
7294 op1 = temp;
7295 }
7296 /* If adding to a sum including a constant,
7297 associate it to put the constant outside. */
7298 if (GET_CODE (op1) == PLUS
7299 && CONSTANT_P (XEXP (op1, 1)))
7300 {
7301 rtx constant_term = const0_rtx;
7302
7303 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7304 if (temp != 0)
7305 op0 = temp;
7306 /* Ensure that MULT comes first if there is one. */
7307 else if (GET_CODE (op0) == MULT)
7308 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7309 else
7310 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7311
7312 /* Let's also eliminate constants from op0 if possible. */
7313 op0 = eliminate_constant_term (op0, &constant_term);
7314
7315 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7316 their sum should be a constant. Form it into OP1, since the
7317 result we want will then be OP0 + OP1. */
7318
7319 temp = simplify_binary_operation (PLUS, mode, constant_term,
7320 XEXP (op1, 1));
7321 if (temp != 0)
7322 op1 = temp;
7323 else
7324 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7325 }
7326
7327 /* Put a constant term last and put a multiplication first. */
7328 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7329 temp = op1, op1 = op0, op0 = temp;
7330
7331 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7332 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7333
7334 case MINUS_EXPR:
7335 /* For initializers, we are allowed to return a MINUS of two
7336 symbolic constants. Here we handle all cases when both operands
7337 are constant. */
7338 /* Handle difference of two symbolic constants,
7339 for the sake of an initializer. */
7340 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7341 && really_constant_p (TREE_OPERAND (exp, 0))
7342 && really_constant_p (TREE_OPERAND (exp, 1)))
7343 {
7344 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7345 VOIDmode, ro_modifier);
7346 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7347 VOIDmode, ro_modifier);
7348
7349 /* If the last operand is a CONST_INT, use plus_constant of
7350 the negated constant. Else make the MINUS. */
7351 if (GET_CODE (op1) == CONST_INT)
7352 return plus_constant (op0, - INTVAL (op1));
7353 else
7354 return gen_rtx_MINUS (mode, op0, op1);
7355 }
7356 /* Convert A - const to A + (-const). */
7357 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7358 {
7359 tree negated = fold (build1 (NEGATE_EXPR, type,
7360 TREE_OPERAND (exp, 1)));
7361
7362 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7363 /* If we can't negate the constant in TYPE, leave it alone and
7364 expand_binop will negate it for us. We used to try to do it
7365 here in the signed version of TYPE, but that doesn't work
7366 on POINTER_TYPEs. */;
7367 else
7368 {
7369 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7370 goto plus_expr;
7371 }
7372 }
7373 this_optab = sub_optab;
7374 goto binop;
7375
7376 case MULT_EXPR:
7377 preexpand_calls (exp);
7378 /* If first operand is constant, swap them.
7379 Thus the following special case checks need only
7380 check the second operand. */
7381 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7382 {
7383 register tree t1 = TREE_OPERAND (exp, 0);
7384 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7385 TREE_OPERAND (exp, 1) = t1;
7386 }
7387
7388 /* Attempt to return something suitable for generating an
7389 indexed address, for machines that support that. */
7390
7391 if (modifier == EXPAND_SUM && mode == ptr_mode
7392 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7393 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7394 {
7395 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7396 EXPAND_SUM);
7397
7398 /* Apply distributive law if OP0 is x+c. */
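	    /* E.g. (x + 4) * 2 becomes x*2 + 8, so the constant part stays
	       at the top level of the address expression.  */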
7399 if (GET_CODE (op0) == PLUS
7400 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7401 return
7402 gen_rtx_PLUS
7403 (mode,
7404 gen_rtx_MULT
7405 (mode, XEXP (op0, 0),
7406 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7407 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7408 * INTVAL (XEXP (op0, 1))));
7409
7410 if (GET_CODE (op0) != REG)
7411 op0 = force_operand (op0, NULL_RTX);
7412 if (GET_CODE (op0) != REG)
7413 op0 = copy_to_mode_reg (mode, op0);
7414
7415 return
7416 gen_rtx_MULT (mode, op0,
7417 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7418 }
7419
7420 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7421 subtarget = 0;
7422
7423 /* Check for multiplying things that have been extended
7424 from a narrower type. If this machine supports multiplying
7425 in that narrower type with a result in the desired type,
7426 do it that way, and avoid the explicit type-conversion. */
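	 /* For example, multiplying two values that were each extended from
	    16 bits to 32 bits can use a 16x16->32 widening multiply
	    instruction, provided both operands were extended the same way.  */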
7427 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7428 && TREE_CODE (type) == INTEGER_TYPE
7429 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7430 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7431 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7432 && int_fits_type_p (TREE_OPERAND (exp, 1),
7433 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7434 /* Don't use a widening multiply if a shift will do. */
7435 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7436 > HOST_BITS_PER_WIDE_INT)
7437 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7438 ||
7439 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7440 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7441 ==
7442 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7443 /* If both operands are extended, they must either both
7444 be zero-extended or both be sign-extended. */
7445 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7446 ==
7447 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7448 {
7449 enum machine_mode innermode
7450 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7451 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7452 ? smul_widen_optab : umul_widen_optab);
7453 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7454 ? umul_widen_optab : smul_widen_optab);
7455 if (mode == GET_MODE_WIDER_MODE (innermode))
7456 {
7457 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7458 {
7459 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7460 NULL_RTX, VOIDmode, 0);
7461 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7462 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7463 VOIDmode, 0);
7464 else
7465 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7466 NULL_RTX, VOIDmode, 0);
7467 goto binop2;
7468 }
7469 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7470 && innermode == word_mode)
7471 {
7472 rtx htem;
7473 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7474 NULL_RTX, VOIDmode, 0);
7475 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7476 op1 = convert_modes (innermode, mode,
7477 expand_expr (TREE_OPERAND (exp, 1),
7478 NULL_RTX, VOIDmode, 0),
7479 unsignedp);
7480 else
7481 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7482 NULL_RTX, VOIDmode, 0);
7483 temp = expand_binop (mode, other_optab, op0, op1, target,
7484 unsignedp, OPTAB_LIB_WIDEN);
7485 htem = expand_mult_highpart_adjust (innermode,
7486 gen_highpart (innermode, temp),
7487 op0, op1,
7488 gen_highpart (innermode, temp),
7489 unsignedp);
7490 emit_move_insn (gen_highpart (innermode, temp), htem);
7491 return temp;
7492 }
7493 }
7494 }
7495 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7496 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7497 return expand_mult (mode, op0, op1, target, unsignedp);
7498
7499 case TRUNC_DIV_EXPR:
7500 case FLOOR_DIV_EXPR:
7501 case CEIL_DIV_EXPR:
7502 case ROUND_DIV_EXPR:
7503 case EXACT_DIV_EXPR:
7504 preexpand_calls (exp);
7505 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7506 subtarget = 0;
7507       /* Possible optimization: compute the dividend with EXPAND_SUM;
7508 	 then, if the divisor is constant, we can optimize the case
7509 	 where some terms of the dividend have coefficients divisible by it. */
7510 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7511 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7512 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7513
7514 case RDIV_EXPR:
7515 this_optab = flodiv_optab;
7516 goto binop;
7517
7518 case TRUNC_MOD_EXPR:
7519 case FLOOR_MOD_EXPR:
7520 case CEIL_MOD_EXPR:
7521 case ROUND_MOD_EXPR:
7522 preexpand_calls (exp);
7523 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7524 subtarget = 0;
7525 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7526 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7527 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7528
7529 case FIX_ROUND_EXPR:
7530 case FIX_FLOOR_EXPR:
7531 case FIX_CEIL_EXPR:
7532 abort (); /* Not used for C. */
7533
7534 case FIX_TRUNC_EXPR:
7535 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7536 if (target == 0)
7537 target = gen_reg_rtx (mode);
7538 expand_fix (target, op0, unsignedp);
7539 return target;
7540
7541 case FLOAT_EXPR:
7542 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7543 if (target == 0)
7544 target = gen_reg_rtx (mode);
7545 /* expand_float can't figure out what to do if FROM has VOIDmode.
7546 So give it the correct mode. With -O, cse will optimize this. */
7547 if (GET_MODE (op0) == VOIDmode)
7548 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7549 op0);
7550 expand_float (target, op0,
7551 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7552 return target;
7553
7554 case NEGATE_EXPR:
7555 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7556 temp = expand_unop (mode, neg_optab, op0, target, 0);
7557 if (temp == 0)
7558 abort ();
7559 return temp;
7560
7561 case ABS_EXPR:
7562 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7563
7564 /* Handle complex values specially. */
7565 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7566 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7567 return expand_complex_abs (mode, op0, target, unsignedp);
7568
7569 /* Unsigned abs is simply the operand. Testing here means we don't
7570 risk generating incorrect code below. */
7571 if (TREE_UNSIGNED (type))
7572 return op0;
7573
7574 return expand_abs (mode, op0, target,
7575 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7576
7577 case MAX_EXPR:
7578 case MIN_EXPR:
7579 target = original_target;
7580 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7581 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7582 || GET_MODE (target) != mode
7583 || (GET_CODE (target) == REG
7584 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7585 target = gen_reg_rtx (mode);
7586 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7587 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7588
7589 /* First try to do it with a special MIN or MAX instruction.
7590 If that does not win, use a conditional jump to select the proper
7591 value. */
7592 this_optab = (TREE_UNSIGNED (type)
7593 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7594 : (code == MIN_EXPR ? smin_optab : smax_optab));
7595
7596 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7597 OPTAB_WIDEN);
7598 if (temp != 0)
7599 return temp;
7600
7601 /* At this point, a MEM target is no longer useful; we will get better
7602 code without it. */
7603
7604 if (GET_CODE (target) == MEM)
7605 target = gen_reg_rtx (mode);
7606
7607 if (target != op0)
7608 emit_move_insn (target, op0);
7609
7610 op0 = gen_label_rtx ();
7611
7612 /* If this mode is an integer too wide to compare properly,
7613 compare word by word. Rely on cse to optimize constant cases. */
7614 if (GET_MODE_CLASS (mode) == MODE_INT
7615 && ! can_compare_p (GE, mode, ccp_jump))
7616 {
7617 if (code == MAX_EXPR)
7618 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7619 target, op1, NULL_RTX, op0);
7620 else
7621 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7622 op1, target, NULL_RTX, op0);
7623 }
7624 else
7625 {
7626 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7627 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7628 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7629 op0);
7630 }
7631 emit_move_insn (target, op1);
7632 emit_label (op0);
7633 return target;
7634
7635 case BIT_NOT_EXPR:
7636 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7637 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7638 if (temp == 0)
7639 abort ();
7640 return temp;
7641
7642 case FFS_EXPR:
7643 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7644 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7645 if (temp == 0)
7646 abort ();
7647 return temp;
7648
7649 /* ??? Can optimize bitwise operations with one arg constant.
7650 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7651 and (a bitwise1 b) bitwise2 b (etc)
7652        but that is probably not worthwhile. */
7653
7654 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7655 boolean values when we want in all cases to compute both of them. In
7656 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7657 as actual zero-or-1 values and then bitwise anding. In cases where
7658 there cannot be any side effects, better code would be made by
7659 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7660 how to recognize those cases. */
7661
7662 case TRUTH_AND_EXPR:
7663 case BIT_AND_EXPR:
7664 this_optab = and_optab;
7665 goto binop;
7666
7667 case TRUTH_OR_EXPR:
7668 case BIT_IOR_EXPR:
7669 this_optab = ior_optab;
7670 goto binop;
7671
7672 case TRUTH_XOR_EXPR:
7673 case BIT_XOR_EXPR:
7674 this_optab = xor_optab;
7675 goto binop;
7676
7677 case LSHIFT_EXPR:
7678 case RSHIFT_EXPR:
7679 case LROTATE_EXPR:
7680 case RROTATE_EXPR:
7681 preexpand_calls (exp);
7682 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7683 subtarget = 0;
7684 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7685 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7686 unsignedp);
7687
7688 /* Could determine the answer when only additive constants differ. Also,
7689 the addition of one can be handled by changing the condition. */
7690 case LT_EXPR:
7691 case LE_EXPR:
7692 case GT_EXPR:
7693 case GE_EXPR:
7694 case EQ_EXPR:
7695 case NE_EXPR:
7696 case UNORDERED_EXPR:
7697 case ORDERED_EXPR:
7698 case UNLT_EXPR:
7699 case UNLE_EXPR:
7700 case UNGT_EXPR:
7701 case UNGE_EXPR:
7702 case UNEQ_EXPR:
7703 preexpand_calls (exp);
7704 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7705 if (temp != 0)
7706 return temp;
7707
7708 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7709 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7710 && original_target
7711 && GET_CODE (original_target) == REG
7712 && (GET_MODE (original_target)
7713 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7714 {
7715 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7716 VOIDmode, 0);
7717
7718 if (temp != original_target)
7719 temp = copy_to_reg (temp);
7720
7721 op1 = gen_label_rtx ();
7722 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7723 GET_MODE (temp), unsignedp, 0, op1);
7724 emit_move_insn (temp, const1_rtx);
7725 emit_label (op1);
7726 return temp;
7727 }
7728
7729 /* If no set-flag instruction, must generate a conditional
7730 store into a temporary variable. Drop through
7731 and handle this like && and ||. */
7732
7733 case TRUTH_ANDIF_EXPR:
7734 case TRUTH_ORIF_EXPR:
7735 if (! ignore
7736 && (target == 0 || ! safe_from_p (target, exp, 1)
7737 /* Make sure we don't have a hard reg (such as function's return
7738 value) live across basic blocks, if not optimizing. */
7739 || (!optimize && GET_CODE (target) == REG
7740 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7741 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7742
7743 if (target)
7744 emit_clr_insn (target);
7745
7746 op1 = gen_label_rtx ();
7747 jumpifnot (exp, op1);
7748
7749 if (target)
7750 emit_0_to_1_insn (target);
7751
7752 emit_label (op1);
7753 return ignore ? const0_rtx : target;
7754
7755 case TRUTH_NOT_EXPR:
7756 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7757 /* The parser is careful to generate TRUTH_NOT_EXPR
7758 only with operands that are always zero or one. */
7759 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7760 target, 1, OPTAB_LIB_WIDEN);
7761 if (temp == 0)
7762 abort ();
7763 return temp;
7764
7765 case COMPOUND_EXPR:
7766 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7767 emit_queue ();
7768 return expand_expr (TREE_OPERAND (exp, 1),
7769 (ignore ? const0_rtx : target),
7770 VOIDmode, 0);
7771
7772 case COND_EXPR:
7773 /* If we would have a "singleton" (see below) were it not for a
7774 conversion in each arm, bring that conversion back out. */
7775 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7776 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7777 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7778 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7779 {
7780 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7781 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7782
7783 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7784 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7785 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7786 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7787 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7788 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7789 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7790 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7791 return expand_expr (build1 (NOP_EXPR, type,
7792 build (COND_EXPR, TREE_TYPE (true),
7793 TREE_OPERAND (exp, 0),
7794 true, false)),
7795 target, tmode, modifier);
7796 }
7797
7798 {
7799 /* Note that COND_EXPRs whose type is a structure or union
7800 are required to be constructed to contain assignments of
7801 a temporary variable, so that we can evaluate them here
7802 for side effect only. If type is void, we must do likewise. */
7803
7804 /* If an arm of the branch requires a cleanup,
7805 only that cleanup is performed. */
7806
7807 tree singleton = 0;
7808 tree binary_op = 0, unary_op = 0;
7809
7810 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7811 convert it to our mode, if necessary. */
7812 if (integer_onep (TREE_OPERAND (exp, 1))
7813 && integer_zerop (TREE_OPERAND (exp, 2))
7814 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7815 {
7816 if (ignore)
7817 {
7818 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7819 ro_modifier);
7820 return const0_rtx;
7821 }
7822
7823 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7824 if (GET_MODE (op0) == mode)
7825 return op0;
7826
7827 if (target == 0)
7828 target = gen_reg_rtx (mode);
7829 convert_move (target, op0, unsignedp);
7830 return target;
7831 }
7832
7833 /* Check for X ? A + B : A. If we have this, we can copy A to the
7834 output and conditionally add B. Similarly for unary operations.
7835 Don't do this if X has side-effects because those side effects
7836 might affect A or B and the "?" operation is a sequence point in
7837 ANSI. (operand_equal_p tests for side effects.) */
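	 /* In the tests below, SINGLETON is the arm that is just A, and
	    BINARY_OP or UNARY_OP is the arm that computes something from it
	    (A + B, or a unary operation on A).  */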
7838
7839 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7840 && operand_equal_p (TREE_OPERAND (exp, 2),
7841 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7842 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7843 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7844 && operand_equal_p (TREE_OPERAND (exp, 1),
7845 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7846 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7847 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7848 && operand_equal_p (TREE_OPERAND (exp, 2),
7849 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7850 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7851 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7852 && operand_equal_p (TREE_OPERAND (exp, 1),
7853 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7854 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7855
7856 /* If we are not to produce a result, we have no target. Otherwise,
7857 if a target was specified use it; it will not be used as an
7858 intermediate target unless it is safe. If no target, use a
7859 temporary. */
7860
7861 if (ignore)
7862 temp = 0;
7863 else if (original_target
7864 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7865 || (singleton && GET_CODE (original_target) == REG
7866 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7867 && original_target == var_rtx (singleton)))
7868 && GET_MODE (original_target) == mode
7869 #ifdef HAVE_conditional_move
7870 && (! can_conditionally_move_p (mode)
7871 || GET_CODE (original_target) == REG
7872 || TREE_ADDRESSABLE (type))
7873 #endif
7874 && ! (GET_CODE (original_target) == MEM
7875 && MEM_VOLATILE_P (original_target)))
7876 temp = original_target;
7877 else if (TREE_ADDRESSABLE (type))
7878 abort ();
7879 else
7880 temp = assign_temp (type, 0, 0, 1);
7881
7882 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7883 do the test of X as a store-flag operation, do this as
7884 A + ((X != 0) << log C). Similarly for other simple binary
7885 	   operators.  Only do this for C == 1 if BRANCH_COST is low. */
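	/* E.g. (x < y ? a + 4 : a) can become a + ((x < y) << 2).  */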
7886 if (temp && singleton && binary_op
7887 && (TREE_CODE (binary_op) == PLUS_EXPR
7888 || TREE_CODE (binary_op) == MINUS_EXPR
7889 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7890 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7891 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7892 : integer_onep (TREE_OPERAND (binary_op, 1)))
7893 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7894 {
7895 rtx result;
7896 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7897 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7898 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7899 : xor_optab);
7900
7901 /* If we had X ? A : A + 1, do this as A + (X == 0).
7902
7903 We have to invert the truth value here and then put it
7904 back later if do_store_flag fails. We cannot simply copy
7905 TREE_OPERAND (exp, 0) to another variable and modify that
7906 because invert_truthvalue can modify the tree pointed to
7907 by its argument. */
7908 if (singleton == TREE_OPERAND (exp, 1))
7909 TREE_OPERAND (exp, 0)
7910 = invert_truthvalue (TREE_OPERAND (exp, 0));
7911
7912 result = do_store_flag (TREE_OPERAND (exp, 0),
7913 (safe_from_p (temp, singleton, 1)
7914 ? temp : NULL_RTX),
7915 mode, BRANCH_COST <= 1);
7916
7917 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7918 result = expand_shift (LSHIFT_EXPR, mode, result,
7919 build_int_2 (tree_log2
7920 (TREE_OPERAND
7921 (binary_op, 1)),
7922 0),
7923 (safe_from_p (temp, singleton, 1)
7924 ? temp : NULL_RTX), 0);
7925
7926 if (result)
7927 {
7928 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7929 return expand_binop (mode, boptab, op1, result, temp,
7930 unsignedp, OPTAB_LIB_WIDEN);
7931 }
7932 else if (singleton == TREE_OPERAND (exp, 1))
7933 TREE_OPERAND (exp, 0)
7934 = invert_truthvalue (TREE_OPERAND (exp, 0));
7935 }
7936
7937 do_pending_stack_adjust ();
7938 NO_DEFER_POP;
7939 op0 = gen_label_rtx ();
7940
7941 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7942 {
7943 if (temp != 0)
7944 {
7945 /* If the target conflicts with the other operand of the
7946 binary op, we can't use it. Also, we can't use the target
7947 if it is a hard register, because evaluating the condition
7948 might clobber it. */
7949 if ((binary_op
7950 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7951 || (GET_CODE (temp) == REG
7952 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7953 temp = gen_reg_rtx (mode);
7954 store_expr (singleton, temp, 0);
7955 }
7956 else
7957 expand_expr (singleton,
7958 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7959 if (singleton == TREE_OPERAND (exp, 1))
7960 jumpif (TREE_OPERAND (exp, 0), op0);
7961 else
7962 jumpifnot (TREE_OPERAND (exp, 0), op0);
7963
7964 start_cleanup_deferral ();
7965 if (binary_op && temp == 0)
7966 /* Just touch the other operand. */
7967 expand_expr (TREE_OPERAND (binary_op, 1),
7968 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7969 else if (binary_op)
7970 store_expr (build (TREE_CODE (binary_op), type,
7971 make_tree (type, temp),
7972 TREE_OPERAND (binary_op, 1)),
7973 temp, 0);
7974 else
7975 store_expr (build1 (TREE_CODE (unary_op), type,
7976 make_tree (type, temp)),
7977 temp, 0);
7978 op1 = op0;
7979 }
7980 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7981 comparison operator. If we have one of these cases, set the
7982 output to A, branch on A (cse will merge these two references),
7983 then set the output to FOO. */
7984 else if (temp
7985 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7986 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7987 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7988 TREE_OPERAND (exp, 1), 0)
7989 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7990 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7991 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7992 {
7993 if (GET_CODE (temp) == REG
7994 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7995 temp = gen_reg_rtx (mode);
7996 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7997 jumpif (TREE_OPERAND (exp, 0), op0);
7998
7999 start_cleanup_deferral ();
8000 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8001 op1 = op0;
8002 }
8003 else if (temp
8004 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8005 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8006 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8007 TREE_OPERAND (exp, 2), 0)
8008 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8009 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8010 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8011 {
8012 if (GET_CODE (temp) == REG
8013 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8014 temp = gen_reg_rtx (mode);
8015 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8016 jumpifnot (TREE_OPERAND (exp, 0), op0);
8017
8018 start_cleanup_deferral ();
8019 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8020 op1 = op0;
8021 }
8022 else
8023 {
8024 op1 = gen_label_rtx ();
8025 jumpifnot (TREE_OPERAND (exp, 0), op0);
8026
8027 start_cleanup_deferral ();
8028
8029 	    /* One branch of the cond can be void if it never returns.  For
8030 	       example, A ? throw : E.  */
8031 if (temp != 0
8032 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8033 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8034 else
8035 expand_expr (TREE_OPERAND (exp, 1),
8036 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8037 end_cleanup_deferral ();
8038 emit_queue ();
8039 emit_jump_insn (gen_jump (op1));
8040 emit_barrier ();
8041 emit_label (op0);
8042 start_cleanup_deferral ();
8043 if (temp != 0
8044 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8045 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8046 else
8047 expand_expr (TREE_OPERAND (exp, 2),
8048 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8049 }
8050
8051 end_cleanup_deferral ();
8052
8053 emit_queue ();
8054 emit_label (op1);
8055 OK_DEFER_POP;
8056
8057 return temp;
8058 }
8059
8060 case TARGET_EXPR:
8061 {
8062 /* Something needs to be initialized, but we didn't know
8063 where that thing was when building the tree. For example,
8064 it could be the return value of a function, or a parameter
8065 	   to a function which is laid out on the stack, or a temporary
8066 variable which must be passed by reference.
8067
8068 We guarantee that the expression will either be constructed
8069 or copied into our original target. */
8070
8071 tree slot = TREE_OPERAND (exp, 0);
8072 tree cleanups = NULL_TREE;
8073 tree exp1;
8074
8075 if (TREE_CODE (slot) != VAR_DECL)
8076 abort ();
8077
8078 if (! ignore)
8079 target = original_target;
8080
8081 /* Set this here so that if we get a target that refers to a
8082 register variable that's already been used, put_reg_into_stack
8083 knows that it should fix up those uses. */
8084 TREE_USED (slot) = 1;
8085
8086 if (target == 0)
8087 {
8088 if (DECL_RTL (slot) != 0)
8089 {
8090 target = DECL_RTL (slot);
8091 		/* If we have already expanded the slot, don't do
8092 		   it again. (mrs) */
8093 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8094 return target;
8095 }
8096 else
8097 {
8098 target = assign_temp (type, 2, 0, 1);
8099 /* All temp slots at this level must not conflict. */
8100 preserve_temp_slots (target);
8101 DECL_RTL (slot) = target;
8102 if (TREE_ADDRESSABLE (slot))
8103 {
8104 TREE_ADDRESSABLE (slot) = 0;
8105 mark_addressable (slot);
8106 }
8107
8108 /* Since SLOT is not known to the called function
8109 to belong to its stack frame, we must build an explicit
8110 cleanup. This case occurs when we must build up a reference
8111 to pass the reference as an argument. In this case,
8112 it is very likely that such a reference need not be
8113 built here. */
8114
8115 if (TREE_OPERAND (exp, 2) == 0)
8116 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8117 cleanups = TREE_OPERAND (exp, 2);
8118 }
8119 }
8120 else
8121 {
8122 	    /* This case does occur when expanding a parameter which
8123 needs to be constructed on the stack. The target
8124 is the actual stack address that we want to initialize.
8125 The function we call will perform the cleanup in this case. */
8126
8127 /* If we have already assigned it space, use that space,
8128 	       not the target that we were passed in, as our target
8129 parameter is only a hint. */
8130 if (DECL_RTL (slot) != 0)
8131 {
8132 target = DECL_RTL (slot);
8133 		/* If we have already expanded the slot, don't do
8134 		   it again. (mrs) */
8135 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8136 return target;
8137 }
8138 else
8139 {
8140 DECL_RTL (slot) = target;
8141 /* If we must have an addressable slot, then make sure that
8142 the RTL that we just stored in slot is OK. */
8143 if (TREE_ADDRESSABLE (slot))
8144 {
8145 TREE_ADDRESSABLE (slot) = 0;
8146 mark_addressable (slot);
8147 }
8148 }
8149 }
8150
8151 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8152 /* Mark it as expanded. */
8153 TREE_OPERAND (exp, 1) = NULL_TREE;
8154
8155 store_expr (exp1, target, 0);
8156
8157 expand_decl_cleanup (NULL_TREE, cleanups);
8158
8159 return target;
8160 }
8161
8162 case INIT_EXPR:
8163 {
8164 tree lhs = TREE_OPERAND (exp, 0);
8165 tree rhs = TREE_OPERAND (exp, 1);
8166 tree noncopied_parts = 0;
8167 tree lhs_type = TREE_TYPE (lhs);
8168
8169 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8170 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8171 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8172 TYPE_NONCOPIED_PARTS (lhs_type));
8173 while (noncopied_parts != 0)
8174 {
8175 expand_assignment (TREE_VALUE (noncopied_parts),
8176 TREE_PURPOSE (noncopied_parts), 0, 0);
8177 noncopied_parts = TREE_CHAIN (noncopied_parts);
8178 }
8179 return temp;
8180 }
8181
8182 case MODIFY_EXPR:
8183 {
8184 /* If lhs is complex, expand calls in rhs before computing it.
8185 That's so we don't compute a pointer and save it over a call.
8186 If lhs is simple, compute it first so we can give it as a
8187 target if the rhs is just a call. This avoids an extra temp and copy
8188 	 and prevents a partial subsumption which makes bad code.
8189 Actually we could treat component_ref's of vars like vars. */
8190
8191 tree lhs = TREE_OPERAND (exp, 0);
8192 tree rhs = TREE_OPERAND (exp, 1);
8193 tree noncopied_parts = 0;
8194 tree lhs_type = TREE_TYPE (lhs);
8195
8196 temp = 0;
8197
8198 if (TREE_CODE (lhs) != VAR_DECL
8199 && TREE_CODE (lhs) != RESULT_DECL
8200 && TREE_CODE (lhs) != PARM_DECL
8201 && ! (TREE_CODE (lhs) == INDIRECT_REF
8202 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8203 preexpand_calls (exp);
8204
8205 /* Check for |= or &= of a bitfield of size one into another bitfield
8206 of size 1. In this case, (unless we need the result of the
8207 assignment) we can do this more efficiently with a
8208 test followed by an assignment, if necessary.
8209
8210 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8211 things change so we do, this code should be enhanced to
8212 support it. */
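	  /* E.g. for "s.x |= s.y;" with two one-bit fields, this jumps around
	     the store when S.Y is zero and otherwise stores the constant 1
	     into S.X, instead of doing read-modify-write arithmetic on the
	     bitfields.  */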
8213 if (ignore
8214 && TREE_CODE (lhs) == COMPONENT_REF
8215 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8216 || TREE_CODE (rhs) == BIT_AND_EXPR)
8217 && TREE_OPERAND (rhs, 0) == lhs
8218 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8219 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8220 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8221 {
8222 rtx label = gen_label_rtx ();
8223
8224 do_jump (TREE_OPERAND (rhs, 1),
8225 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8226 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8227 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8228 (TREE_CODE (rhs) == BIT_IOR_EXPR
8229 ? integer_one_node
8230 : integer_zero_node)),
8231 0, 0);
8232 do_pending_stack_adjust ();
8233 emit_label (label);
8234 return const0_rtx;
8235 }
8236
8237 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8238 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8239 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8240 TYPE_NONCOPIED_PARTS (lhs_type));
8241
8242 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8243 while (noncopied_parts != 0)
8244 {
8245 expand_assignment (TREE_PURPOSE (noncopied_parts),
8246 TREE_VALUE (noncopied_parts), 0, 0);
8247 noncopied_parts = TREE_CHAIN (noncopied_parts);
8248 }
8249 return temp;
8250 }
8251
8252 case RETURN_EXPR:
8253 if (!TREE_OPERAND (exp, 0))
8254 expand_null_return ();
8255 else
8256 expand_return (TREE_OPERAND (exp, 0));
8257 return const0_rtx;
8258
8259 case PREINCREMENT_EXPR:
8260 case PREDECREMENT_EXPR:
8261 return expand_increment (exp, 0, ignore);
8262
8263 case POSTINCREMENT_EXPR:
8264 case POSTDECREMENT_EXPR:
8265 /* Faster to treat as pre-increment if result is not used. */
8266 return expand_increment (exp, ! ignore, ignore);
8267
8268 case ADDR_EXPR:
8269 /* If nonzero, TEMP will be set to the address of something that might
8270 be a MEM corresponding to a stack slot. */
8271 temp = 0;
8272
8273 /* Are we taking the address of a nested function? */
8274 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8275 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8276 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8277 && ! TREE_STATIC (exp))
8278 {
8279 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8280 op0 = force_operand (op0, target);
8281 }
8282 /* If we are taking the address of something erroneous, just
8283 return a zero. */
8284 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8285 return const0_rtx;
8286 else
8287 {
8288 /* We make sure to pass const0_rtx down if we came in with
8289 	     ignore set, to avoid doing the cleanups twice for the same thing. */
8290 op0 = expand_expr (TREE_OPERAND (exp, 0),
8291 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8292 (modifier == EXPAND_INITIALIZER
8293 ? modifier : EXPAND_CONST_ADDRESS));
8294
8295 /* If we are going to ignore the result, OP0 will have been set
8296 to const0_rtx, so just return it. Don't get confused and
8297 think we are taking the address of the constant. */
8298 if (ignore)
8299 return op0;
8300
8301 op0 = protect_from_queue (op0, 0);
8302
8303 /* We would like the object in memory. If it is a constant, we can
8304 have it be statically allocated into memory. For a non-constant,
8305 we need to allocate some memory and store the value into it. */
8306
8307 if (CONSTANT_P (op0))
8308 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8309 op0);
8310 else if (GET_CODE (op0) == MEM)
8311 {
8312 mark_temp_addr_taken (op0);
8313 temp = XEXP (op0, 0);
8314 }
8315
8316 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8317 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8318 {
8319 	      /* If this object is in a register, it must not
8320 		 be BLKmode. */
8321 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8322 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8323
8324 mark_temp_addr_taken (memloc);
8325 emit_move_insn (memloc, op0);
8326 op0 = memloc;
8327 }
8328
8329 if (GET_CODE (op0) != MEM)
8330 abort ();
8331
8332 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8333 {
8334 temp = XEXP (op0, 0);
8335 #ifdef POINTERS_EXTEND_UNSIGNED
8336 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8337 && mode == ptr_mode)
8338 temp = convert_memory_address (ptr_mode, temp);
8339 #endif
8340 return temp;
8341 }
8342
8343 op0 = force_operand (XEXP (op0, 0), target);
8344 }
8345
8346 if (flag_force_addr && GET_CODE (op0) != REG)
8347 op0 = force_reg (Pmode, op0);
8348
8349 if (GET_CODE (op0) == REG
8350 && ! REG_USERVAR_P (op0))
8351 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8352
8353 /* If we might have had a temp slot, add an equivalent address
8354 for it. */
8355 if (temp != 0)
8356 update_temp_slot_address (temp, op0);
8357
8358 #ifdef POINTERS_EXTEND_UNSIGNED
8359 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8360 && mode == ptr_mode)
8361 op0 = convert_memory_address (ptr_mode, op0);
8362 #endif
8363
8364 return op0;
8365
8366 case ENTRY_VALUE_EXPR:
8367 abort ();
8368
8369 /* COMPLEX type for Extended Pascal & Fortran */
8370 case COMPLEX_EXPR:
8371 {
8372 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8373 rtx insns;
8374
8375 /* Get the rtx code of the operands. */
8376 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8377 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8378
8379 if (! target)
8380 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8381
8382 start_sequence ();
8383
8384 /* Move the real (op0) and imaginary (op1) parts to their location. */
8385 emit_move_insn (gen_realpart (mode, target), op0);
8386 emit_move_insn (gen_imagpart (mode, target), op1);
8387
8388 insns = get_insns ();
8389 end_sequence ();
8390
8391 /* Complex construction should appear as a single unit. */
8392 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8393 each with a separate pseudo as destination.
8394 It's not correct for flow to treat them as a unit. */
8395 if (GET_CODE (target) != CONCAT)
8396 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8397 else
8398 emit_insns (insns);
8399
8400 return target;
8401 }
8402
8403 case REALPART_EXPR:
8404 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8405 return gen_realpart (mode, op0);
8406
8407 case IMAGPART_EXPR:
8408 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8409 return gen_imagpart (mode, op0);
8410
8411 case CONJ_EXPR:
8412 {
8413 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8414 rtx imag_t;
8415 rtx insns;
8416
8417 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8418
8419 if (! target)
8420 target = gen_reg_rtx (mode);
8421
8422 start_sequence ();
8423
8424 /* Store the realpart and the negated imagpart to target. */
8425 emit_move_insn (gen_realpart (partmode, target),
8426 gen_realpart (partmode, op0));
8427
8428 imag_t = gen_imagpart (partmode, target);
8429 temp = expand_unop (partmode, neg_optab,
8430 gen_imagpart (partmode, op0), imag_t, 0);
8431 if (temp != imag_t)
8432 emit_move_insn (imag_t, temp);
8433
8434 insns = get_insns ();
8435 end_sequence ();
8436
8437 	/* Conjugate should appear as a single unit.
8438 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8439 each with a separate pseudo as destination.
8440 It's not correct for flow to treat them as a unit. */
8441 if (GET_CODE (target) != CONCAT)
8442 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8443 else
8444 emit_insns (insns);
8445
8446 return target;
8447 }
8448
8449 case TRY_CATCH_EXPR:
8450 {
8451 tree handler = TREE_OPERAND (exp, 1);
8452
8453 expand_eh_region_start ();
8454
8455 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8456
8457 expand_eh_region_end (handler);
8458
8459 return op0;
8460 }
8461
8462 case TRY_FINALLY_EXPR:
8463 {
8464 tree try_block = TREE_OPERAND (exp, 0);
8465 tree finally_block = TREE_OPERAND (exp, 1);
8466 rtx finally_label = gen_label_rtx ();
8467 rtx done_label = gen_label_rtx ();
8468 rtx return_link = gen_reg_rtx (Pmode);
8469 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8470 (tree) finally_label, (tree) return_link);
8471 TREE_SIDE_EFFECTS (cleanup) = 1;
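	/* The cleanup acts like a subroutine call: it jumps to FINALLY_LABEL
	   with the resume address in RETURN_LINK, and the finally block jumps
	   back through RETURN_LINK when it is done (see the
	   GOTO_SUBROUTINE_EXPR case below).  */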
8472
8473 /* Start a new binding layer that will keep track of all cleanup
8474 actions to be performed. */
8475 expand_start_bindings (2);
8476
8477 target_temp_slot_level = temp_slot_level;
8478
8479 expand_decl_cleanup (NULL_TREE, cleanup);
8480 op0 = expand_expr (try_block, target, tmode, modifier);
8481
8482 preserve_temp_slots (op0);
8483 expand_end_bindings (NULL_TREE, 0, 0);
8484 emit_jump (done_label);
8485 emit_label (finally_label);
8486 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8487 emit_indirect_jump (return_link);
8488 emit_label (done_label);
8489 return op0;
8490 }
8491
8492 case GOTO_SUBROUTINE_EXPR:
8493 {
8494 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8495 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8496 rtx return_address = gen_label_rtx ();
8497 emit_move_insn (return_link,
8498 gen_rtx_LABEL_REF (Pmode, return_address));
8499 emit_jump (subr);
8500 emit_label (return_address);
8501 return const0_rtx;
8502 }
8503
8504 case POPDCC_EXPR:
8505 {
8506 rtx dcc = get_dynamic_cleanup_chain ();
8507 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8508 return const0_rtx;
8509 }
8510
8511 case POPDHC_EXPR:
8512 {
8513 rtx dhc = get_dynamic_handler_chain ();
8514 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8515 return const0_rtx;
8516 }
8517
8518 case VA_ARG_EXPR:
8519 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8520
8521 default:
8522 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8523 }
8524
8525 /* Here to do an ordinary binary operator, generating an instruction
8526 from the optab already placed in `this_optab'. */
8527 binop:
8528 preexpand_calls (exp);
8529 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8530 subtarget = 0;
8531 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8532 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8533 binop2:
8534 temp = expand_binop (mode, this_optab, op0, op1, target,
8535 unsignedp, OPTAB_LIB_WIDEN);
8536 if (temp == 0)
8537 abort ();
8538 return temp;
8539 }
8540 \f
8541 /* Similar to expand_expr, except that we don't specify a target, target
8542 mode, or modifier and we return the alignment of the inner type. This is
8543 used in cases where it is not necessary to align the result to the
8544 alignment of its type as long as we know the alignment of the result, for
8545 example for comparisons of BLKmode values. */
8546
8547 static rtx
8548 expand_expr_unaligned (exp, palign)
8549 register tree exp;
8550 unsigned int *palign;
8551 {
8552 register rtx op0;
8553 tree type = TREE_TYPE (exp);
8554 register enum machine_mode mode = TYPE_MODE (type);
8555
8556 /* Default the alignment we return to that of the type. */
8557 *palign = TYPE_ALIGN (type);
8558
8559 /* The only cases in which we do anything special is if the resulting mode
8560 is BLKmode. */
8561 if (mode != BLKmode)
8562 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8563
8564 switch (TREE_CODE (exp))
8565 {
8566 case CONVERT_EXPR:
8567 case NOP_EXPR:
8568 case NON_LVALUE_EXPR:
8569 /* Conversions between BLKmode values don't change the underlying
8570 alignment or value. */
8571 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8572 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8573 break;
8574
8575 case ARRAY_REF:
8576 /* Much of the code for this case is copied directly from expand_expr.
8577 We need to duplicate it here because we will do something different
8578 in the fall-through case, so we need to handle the same exceptions
8579 it does. */
8580 {
8581 tree array = TREE_OPERAND (exp, 0);
8582 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8583 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8584 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8585 HOST_WIDE_INT i;
8586
8587 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8588 abort ();
8589
8590 /* Optimize the special-case of a zero lower bound.
8591
8592 We convert the low_bound to sizetype to avoid some problems
8593 with constant folding. (E.g. suppose the lower bound is 1,
8594 and its mode is QI. Without the conversion, (ARRAY
8595 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8596 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8597
8598 if (! integer_zerop (low_bound))
8599 index = size_diffop (index, convert (sizetype, low_bound));
8600
8601 /* If this is a constant index into a constant array,
8602 just get the value from the array. Handle both the cases when
8603 we have an explicit constructor and when our operand is a variable
8604 that was declared const. */
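	/* For instance, indexing a "const" array that has a visible
	   initializer with a constant index just expands the matching
	   initializer element.  */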
8605
8606 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8607 && 0 > compare_tree_int (index,
8608 list_length (CONSTRUCTOR_ELTS
8609 (TREE_OPERAND (exp, 0)))))
8610 {
8611 tree elem;
8612
8613 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8614 i = TREE_INT_CST_LOW (index);
8615 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8616 ;
8617
8618 if (elem)
8619 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8620 }
8621
8622 else if (optimize >= 1
8623 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8624 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8625 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8626 {
8627 if (TREE_CODE (index) == INTEGER_CST)
8628 {
8629 tree init = DECL_INITIAL (array);
8630
8631 if (TREE_CODE (init) == CONSTRUCTOR)
8632 {
8633 tree elem;
8634
8635 for (elem = CONSTRUCTOR_ELTS (init);
8636 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8637 elem = TREE_CHAIN (elem))
8638 ;
8639
8640 if (elem)
8641 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8642 palign);
8643 }
8644 }
8645 }
8646 }
8647 /* Fall through. */
8648
8649 case COMPONENT_REF:
8650 case BIT_FIELD_REF:
8651 /* If the operand is a CONSTRUCTOR, we can just extract the
8652 appropriate field if it is present. Don't do this if we have
8653 already written the data since we want to refer to that copy
8654 and varasm.c assumes that's what we'll do. */
8655 if (TREE_CODE (exp) != ARRAY_REF
8656 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8657 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8658 {
8659 tree elt;
8660
8661 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8662 elt = TREE_CHAIN (elt))
8663 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8664 /* Note that unlike the case in expand_expr, we know this is
8665 BLKmode and hence not an integer. */
8666 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8667 }
8668
8669 {
8670 enum machine_mode mode1;
8671 HOST_WIDE_INT bitsize, bitpos;
8672 tree offset;
8673 int volatilep = 0;
8674 unsigned int alignment;
8675 int unsignedp;
8676 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8677 &mode1, &unsignedp, &volatilep,
8678 &alignment);
8679
8680 /* If we got back the original object, something is wrong. Perhaps
8681 we are evaluating an expression too early. In any event, don't
8682 infinitely recurse. */
8683 if (tem == exp)
8684 abort ();
8685
8686 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8687
8688 /* If this is a constant, put it into a register if it is a
8689 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8690 if (CONSTANT_P (op0))
8691 {
8692 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8693
8694 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8695 && offset == 0)
8696 op0 = force_reg (inner_mode, op0);
8697 else
8698 op0 = validize_mem (force_const_mem (inner_mode, op0));
8699 }
8700
8701 if (offset != 0)
8702 {
8703 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8704
8705 /* If this object is in a register, put it into memory.
8706 This case can't occur in C, but can in Ada if we have
8707 unchecked conversion of an expression from a scalar type to
8708 an array or record type. */
8709 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8710 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8711 {
8712 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8713
8714 mark_temp_addr_taken (memloc);
8715 emit_move_insn (memloc, op0);
8716 op0 = memloc;
8717 }
8718
8719 if (GET_CODE (op0) != MEM)
8720 abort ();
8721
8722 if (GET_MODE (offset_rtx) != ptr_mode)
8723 {
8724 #ifdef POINTERS_EXTEND_UNSIGNED
8725 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8726 #else
8727 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8728 #endif
8729 }
8730
8731 op0 = change_address (op0, VOIDmode,
8732 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8733 force_reg (ptr_mode,
8734 offset_rtx)));
8735 }
8736
8737 /* Don't forget about volatility even if this is a bitfield. */
8738 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8739 {
8740 op0 = copy_rtx (op0);
8741 MEM_VOLATILE_P (op0) = 1;
8742 }
8743
8744 /* Check the access. */
8745 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8746 {
8747 rtx to;
8748 int size;
8749
8750 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8751 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8752
8753 /* Check the access right of the pointer. */
8754 in_check_memory_usage = 1;
8755 if (size > BITS_PER_UNIT)
8756 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8757 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8758 TYPE_MODE (sizetype),
8759 GEN_INT (MEMORY_USE_RO),
8760 TYPE_MODE (integer_type_node));
8761 in_check_memory_usage = 0;
8762 }
8763
8764 /* In cases where an aligned union has an unaligned object
8765 as a field, we might be extracting a BLKmode value from
8766 an integer-mode (e.g., SImode) object. Handle this case
8767 by doing the extract into an object as wide as the field
8768 (which we know to be the width of a basic mode), then
8769 storing into memory, and changing the mode to BLKmode.
8770 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8771 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8772 if (mode1 == VOIDmode
8773 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8774 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8775 && (TYPE_ALIGN (type) > alignment
8776 || bitpos % TYPE_ALIGN (type) != 0)))
8777 {
8778 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8779
8780 if (ext_mode == BLKmode)
8781 {
8782 /* In this case, BITPOS must start at a byte boundary. */
8783 if (GET_CODE (op0) != MEM
8784 || bitpos % BITS_PER_UNIT != 0)
8785 abort ();
8786
8787 op0 = change_address (op0, VOIDmode,
8788 plus_constant (XEXP (op0, 0),
8789 bitpos / BITS_PER_UNIT));
8790 }
8791 else
8792 {
8793 rtx new = assign_stack_temp (ext_mode,
8794 bitsize / BITS_PER_UNIT, 0);
8795
8796 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8797 unsignedp, NULL_RTX, ext_mode,
8798 ext_mode, alignment,
8799 int_size_in_bytes (TREE_TYPE (tem)));
8800
8801 /* If the result is a record type and BITSIZE is narrower than
8802 the mode of OP0, an integral mode, and this is a big endian
8803 machine, we must put the field into the high-order bits. */
8804 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8805 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8806 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8807 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8808 size_int (GET_MODE_BITSIZE
8809 (GET_MODE (op0))
8810 - bitsize),
8811 op0, 1);
8812
8813 emit_move_insn (new, op0);
8814 op0 = copy_rtx (new);
8815 PUT_MODE (op0, BLKmode);
8816 }
8817 }
8818 else
8819 /* Get a reference to just this component. */
8820 op0 = change_address (op0, mode1,
8821 plus_constant (XEXP (op0, 0),
8822 (bitpos / BITS_PER_UNIT)));
8823
8824 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8825
8826 /* Adjust the alignment in case the bit position is not
8827 a multiple of the alignment of the inner object. */
8828 while (bitpos % alignment != 0)
8829 alignment >>= 1;
8830
8831 if (GET_CODE (XEXP (op0, 0)) == REG)
8832 mark_reg_pointer (XEXP (op0, 0), alignment);
8833
8834 MEM_IN_STRUCT_P (op0) = 1;
8835 MEM_VOLATILE_P (op0) |= volatilep;
8836
8837 *palign = alignment;
8838 return op0;
8839 }
8840
8841 default:
8842 break;
8843
8844 }
8845
8846 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8847 }
8848 \f
8849 /* Return the tree node if ARG corresponds to a string constant or zero
8850 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8851 in bytes within the string that ARG is accessing. The type of the
8852 offset will be `sizetype'. */
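/* For illustration, the two shapes recognized below correspond to trees
   built for source such as

       &"hello"[0]     -- ADDR_EXPR of a STRING_CST; the offset returned is 0
       &"hello"[i]     -- folded to (PLUS_EXPR &"hello" i); the offset is I

   anything else (e.g. the address of an ordinary array) yields zero.  */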
8853
8854 tree
8855 string_constant (arg, ptr_offset)
8856 tree arg;
8857 tree *ptr_offset;
8858 {
8859 STRIP_NOPS (arg);
8860
8861 if (TREE_CODE (arg) == ADDR_EXPR
8862 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8863 {
8864 *ptr_offset = size_zero_node;
8865 return TREE_OPERAND (arg, 0);
8866 }
8867 else if (TREE_CODE (arg) == PLUS_EXPR)
8868 {
8869 tree arg0 = TREE_OPERAND (arg, 0);
8870 tree arg1 = TREE_OPERAND (arg, 1);
8871
8872 STRIP_NOPS (arg0);
8873 STRIP_NOPS (arg1);
8874
8875 if (TREE_CODE (arg0) == ADDR_EXPR
8876 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8877 {
8878 *ptr_offset = convert (sizetype, arg1);
8879 return TREE_OPERAND (arg0, 0);
8880 }
8881 else if (TREE_CODE (arg1) == ADDR_EXPR
8882 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8883 {
8884 *ptr_offset = convert (sizetype, arg0);
8885 return TREE_OPERAND (arg1, 0);
8886 }
8887 }
8888
8889 return 0;
8890 }
8891 \f
8892 /* Expand code for a post- or pre-increment or decrement
8893 and return the RTX for the result.
8894 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
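/* Illustrative examples (not exercised here directly): for

       y = x++;        -- POST is 1; the value returned is the old X
       y = ++x;        -- POST is 0; the value returned is the incremented X

   IGNORE nonzero means the caller does not use the returned value.  */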
8895
8896 static rtx
8897 expand_increment (exp, post, ignore)
8898 register tree exp;
8899 int post, ignore;
8900 {
8901 register rtx op0, op1;
8902 register rtx temp, value;
8903 register tree incremented = TREE_OPERAND (exp, 0);
8904 optab this_optab = add_optab;
8905 int icode;
8906 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8907 int op0_is_copy = 0;
8908 int single_insn = 0;
8909 /* 1 means we can't store into OP0 directly,
8910 because it is a subreg narrower than a word,
8911 and we don't dare clobber the rest of the word. */
8912 int bad_subreg = 0;
8913
8914 /* Stabilize any component ref that might need to be
8915 evaluated more than once below. */
8916 if (!post
8917 || TREE_CODE (incremented) == BIT_FIELD_REF
8918 || (TREE_CODE (incremented) == COMPONENT_REF
8919 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8920 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8921 incremented = stabilize_reference (incremented);
8922 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8923 ones into save exprs so that they don't accidentally get evaluated
8924 more than once by the code below. */
8925 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8926 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8927 incremented = save_expr (incremented);
8928
8929 /* Compute the operands as RTX.
8930 Note whether OP0 is the actual lvalue or a copy of it:
8931 I believe it is a copy iff it is a register or subreg
8932 and insns were generated in computing it. */
8933
8934 temp = get_last_insn ();
8935 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8936
8937 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8938 in place but instead must do sign- or zero-extension during assignment,
8939 so we copy it into a new register and let the code below use it as
8940 a copy.
8941
8942 Note that we can safely modify this SUBREG since it is known not to be
8943 shared (it was made by the expand_expr call above). */
8944
8945 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8946 {
8947 if (post)
8948 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8949 else
8950 bad_subreg = 1;
8951 }
8952 else if (GET_CODE (op0) == SUBREG
8953 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8954 {
8955 /* We cannot increment this SUBREG in place. If we are
8956 post-incrementing, get a copy of the old value. Otherwise,
8957 just mark that we cannot increment in place. */
8958 if (post)
8959 op0 = copy_to_reg (op0);
8960 else
8961 bad_subreg = 1;
8962 }
8963
8964 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8965 && temp != get_last_insn ());
8966 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8967 EXPAND_MEMORY_USE_BAD);
8968
8969 /* Decide whether incrementing or decrementing. */
8970 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8971 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8972 this_optab = sub_optab;
8973
8974 /* Convert decrement by a constant into a negative increment. */
8975 if (this_optab == sub_optab
8976 && GET_CODE (op1) == CONST_INT)
8977 {
8978 op1 = GEN_INT (-INTVAL (op1));
8979 this_optab = add_optab;
8980 }
8981
8982 /* For a preincrement, see if we can do this with a single instruction. */
8983 if (!post)
8984 {
8985 icode = (int) this_optab->handlers[(int) mode].insn_code;
8986 if (icode != (int) CODE_FOR_nothing
8987 /* Make sure that OP0 is valid for operands 0 and 1
8988 of the insn we want to queue. */
8989 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8990 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8991 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8992 single_insn = 1;
8993 }
8994
8995 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8996 then we cannot just increment OP0. We must therefore contrive to
8997 increment the original value. Then, for postincrement, we can return
8998 OP0 since it is a copy of the old value. For preincrement, expand here
8999 unless we can do it with a single insn.
9000
9001 Likewise if storing directly into OP0 would clobber high bits
9002 we need to preserve (bad_subreg). */
9003 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9004 {
9005 /* This is the easiest way to increment the value wherever it is.
9006 Problems with multiple evaluation of INCREMENTED are prevented
9007 because either (1) it is a component_ref or preincrement,
9008 in which case it was stabilized above, or (2) it is an array_ref
9009 with constant index in an array in a register, which is
9010 safe to reevaluate. */
9011 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9012 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9013 ? MINUS_EXPR : PLUS_EXPR),
9014 TREE_TYPE (exp),
9015 incremented,
9016 TREE_OPERAND (exp, 1));
9017
9018 while (TREE_CODE (incremented) == NOP_EXPR
9019 || TREE_CODE (incremented) == CONVERT_EXPR)
9020 {
9021 newexp = convert (TREE_TYPE (incremented), newexp);
9022 incremented = TREE_OPERAND (incremented, 0);
9023 }
9024
9025 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9026 return post ? op0 : temp;
9027 }
9028
9029 if (post)
9030 {
9031 /* We have a true reference to the value in OP0.
9032 If there is an insn to add or subtract in this mode, queue it.
9033 Queueing the increment insn avoids the register shuffling
9034 that often results if we must increment now and first save
9035 the old value for subsequent use. */
9036
9037 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9038 op0 = stabilize (op0);
9039 #endif
9040
9041 icode = (int) this_optab->handlers[(int) mode].insn_code;
9042 if (icode != (int) CODE_FOR_nothing
9043 /* Make sure that OP0 is valid for operands 0 and 1
9044 of the insn we want to queue. */
9045 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9046 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9047 {
9048 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9049 op1 = force_reg (mode, op1);
9050
9051 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9052 }
9053 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9054 {
9055 rtx addr = (general_operand (XEXP (op0, 0), mode)
9056 ? force_reg (Pmode, XEXP (op0, 0))
9057 : copy_to_reg (XEXP (op0, 0)));
9058 rtx temp, result;
9059
9060 op0 = change_address (op0, VOIDmode, addr);
9061 temp = force_reg (GET_MODE (op0), op0);
9062 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9063 op1 = force_reg (mode, op1);
9064
9065 /* The increment queue is LIFO, thus we have to `queue'
9066 the instructions in reverse order. */
9067 enqueue_insn (op0, gen_move_insn (op0, temp));
9068 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9069 return result;
9070 }
9071 }
9072
9073 /* Preincrement, or we can't increment with one simple insn. */
9074 if (post)
9075 /* Save a copy of the value before inc or dec, to return it later. */
9076 temp = value = copy_to_reg (op0);
9077 else
9078 /* Arrange to return the incremented value. */
9079 /* Copy the rtx because expand_binop will protect from the queue,
9080 and the results of that would be invalid for us to return
9081 if our caller does emit_queue before using our result. */
9082 temp = copy_rtx (value = op0);
9083
9084 /* Increment however we can. */
9085 op1 = expand_binop (mode, this_optab, value, op1,
9086 current_function_check_memory_usage ? NULL_RTX : op0,
9087 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9088 /* Make sure the value is stored into OP0. */
9089 if (op1 != op0)
9090 emit_move_insn (op0, op1);
9091
9092 return temp;
9093 }
9094 \f
9095 /* Expand all function calls contained within EXP, innermost ones first.
9096 But don't look within expressions that have sequence points.
9097 For each CALL_EXPR, record the rtx for its value
9098 in the CALL_EXPR_RTL field. */
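/* For example, when expanding

       z = f (a) + g (b);

   the two CALL_EXPRs are expanded here first and their result rtx recorded
   in CALL_EXPR_RTL; the PLUS_EXPR above them is then expanded using those
   recorded values.  */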
9099
9100 static void
9101 preexpand_calls (exp)
9102 tree exp;
9103 {
9104 register int nops, i;
9105 int class = TREE_CODE_CLASS (TREE_CODE (exp));
9106
9107 if (! do_preexpand_calls)
9108 return;
9109
9110 /* Only expressions and references can contain calls. */
9111
9112 if (! IS_EXPR_CODE_CLASS (class) && class != 'r')
9113 return;
9114
9115 switch (TREE_CODE (exp))
9116 {
9117 case CALL_EXPR:
9118 /* Do nothing if already expanded. */
9119 if (CALL_EXPR_RTL (exp) != 0
9120 /* Do nothing if the call returns a variable-sized object. */
9121 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9122 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9123 /* Do nothing to built-in functions. */
9124 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9125 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9126 == FUNCTION_DECL)
9127 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9128 return;
9129
9130 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9131 return;
9132
9133 case COMPOUND_EXPR:
9134 case COND_EXPR:
9135 case TRUTH_ANDIF_EXPR:
9136 case TRUTH_ORIF_EXPR:
9137 /* If we find one of these, then we can be sure
9138 the adjust will be done for it (since it makes jumps).
9139 Do it now, so that if this is inside an argument
9140 of a function, we don't get the stack adjustment
9141 after some other args have already been pushed. */
9142 do_pending_stack_adjust ();
9143 return;
9144
9145 case BLOCK:
9146 case RTL_EXPR:
9147 case WITH_CLEANUP_EXPR:
9148 case CLEANUP_POINT_EXPR:
9149 case TRY_CATCH_EXPR:
9150 return;
9151
9152 case SAVE_EXPR:
9153 if (SAVE_EXPR_RTL (exp) != 0)
9154 return;
9155
9156 default:
9157 break;
9158 }
9159
9160 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
9161 for (i = 0; i < nops; i++)
9162 if (TREE_OPERAND (exp, i) != 0)
9163 {
9164 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9165 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9166 It doesn't happen before the call is made. */
9167 ;
9168 else
9169 {
9170 class = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9171 if (IS_EXPR_CODE_CLASS (class) || class == 'r')
9172 preexpand_calls (TREE_OPERAND (exp, i));
9173 }
9174 }
9175 }
9176 \f
9177 /* At the start of a function, record that we have no previously-pushed
9178 arguments waiting to be popped. */
9179
9180 void
9181 init_pending_stack_adjust ()
9182 {
9183 pending_stack_adjust = 0;
9184 }
9185
9186 /* When exiting from function, if safe, clear out any pending stack adjust
9187 so the adjustment won't get done.
9188
9189 Note, if the current function calls alloca, then it must have a
9190 frame pointer regardless of the value of flag_omit_frame_pointer. */
9191
9192 void
9193 clear_pending_stack_adjust ()
9194 {
9195 #ifdef EXIT_IGNORE_STACK
9196 if (optimize > 0
9197 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9198 && EXIT_IGNORE_STACK
9199 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9200 && ! flag_inline_functions)
9201 {
9202 stack_pointer_delta -= pending_stack_adjust;
9203 pending_stack_adjust = 0;
9204 }
9205 #endif
9206 }
9207
9208 /* Pop any previously-pushed arguments that have not been popped yet. */
9209
9210 void
9211 do_pending_stack_adjust ()
9212 {
9213 if (inhibit_defer_pop == 0)
9214 {
9215 if (pending_stack_adjust != 0)
9216 adjust_stack (GEN_INT (pending_stack_adjust));
9217 pending_stack_adjust = 0;
9218 }
9219 }
9220 \f
9221 /* Expand conditional expressions. */
9222
9223 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9224 LABEL is an rtx of code CODE_LABEL, in this function and all the
9225 functions here. */
9226
9227 void
9228 jumpifnot (exp, label)
9229 tree exp;
9230 rtx label;
9231 {
9232 do_jump (exp, label, NULL_RTX);
9233 }
9234
9235 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9236
9237 void
9238 jumpif (exp, label)
9239 tree exp;
9240 rtx label;
9241 {
9242 do_jump (exp, NULL_RTX, label);
9243 }
9244
9245 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9246 the result is zero, or IF_TRUE_LABEL if the result is one.
9247 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9248 meaning fall through in that case.
9249
9250 do_jump always does any pending stack adjust except when it does not
9251 actually perform a jump. An example where there is no jump
9252 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9253
9254 This function is responsible for optimizing cases such as
9255 &&, || and comparison operators in EXP. */
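/* As a sketch of the && handling below (AFTER_LABEL is just an illustrative
   name for the label following the guarded statement): for

       if (a && b) stmt;

   the TRUTH_ANDIF_EXPR case emits roughly

       do_jump (a, after_label, NULL_RTX);   -- fall through only if A is true
       do_jump (b, after_label, NULL_RTX);   -- likewise for B
       ... code for STMT ...
     after_label:

   so no boolean value for A && B is ever materialized.  */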
9256
9257 void
9258 do_jump (exp, if_false_label, if_true_label)
9259 tree exp;
9260 rtx if_false_label, if_true_label;
9261 {
9262 register enum tree_code code = TREE_CODE (exp);
9263 /* Some cases need to create a label to jump to
9264 in order to properly fall through.
9265 These cases set DROP_THROUGH_LABEL nonzero. */
9266 rtx drop_through_label = 0;
9267 rtx temp;
9268 int i;
9269 tree type;
9270 enum machine_mode mode;
9271
9272 #ifdef MAX_INTEGER_COMPUTATION_MODE
9273 check_max_integer_computation_mode (exp);
9274 #endif
9275
9276 emit_queue ();
9277
9278 switch (code)
9279 {
9280 case ERROR_MARK:
9281 break;
9282
9283 case INTEGER_CST:
9284 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9285 if (temp)
9286 emit_jump (temp);
9287 break;
9288
9289 #if 0
9290 /* This is not true with #pragma weak */
9291 case ADDR_EXPR:
9292 /* The address of something can never be zero. */
9293 if (if_true_label)
9294 emit_jump (if_true_label);
9295 break;
9296 #endif
9297
9298 case NOP_EXPR:
9299 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9300 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9301 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9302 goto normal;
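/* Fall through. */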
9303 case CONVERT_EXPR:
9304 /* If we are narrowing the operand, we have to do the compare in the
9305 narrower mode. */
9306 if ((TYPE_PRECISION (TREE_TYPE (exp))
9307 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9308 goto normal;
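/* Fall through. */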
9309 case NON_LVALUE_EXPR:
9310 case REFERENCE_EXPR:
9311 case ABS_EXPR:
9312 case NEGATE_EXPR:
9313 case LROTATE_EXPR:
9314 case RROTATE_EXPR:
9315 /* These cannot change zero->non-zero or vice versa. */
9316 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9317 break;
9318
9319 case WITH_RECORD_EXPR:
9320 /* Put the object on the placeholder list, recurse through our first
9321 operand, and pop the list. */
9322 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9323 placeholder_list);
9324 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9325 placeholder_list = TREE_CHAIN (placeholder_list);
9326 break;
9327
9328 #if 0
9329 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9330 a test and can be longer if the test is eliminated. */
9331 case PLUS_EXPR:
9332 /* Reduce to minus. */
9333 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9334 TREE_OPERAND (exp, 0),
9335 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9336 TREE_OPERAND (exp, 1))));
9337 /* Process as MINUS. */
9338 #endif
9339
9340 case MINUS_EXPR:
9341 /* Non-zero iff operands of minus differ. */
9342 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9343 TREE_OPERAND (exp, 0),
9344 TREE_OPERAND (exp, 1)),
9345 NE, NE, if_false_label, if_true_label);
9346 break;
9347
9348 case BIT_AND_EXPR:
9349 /* If we are AND'ing with a small constant, do this comparison in the
9350 smallest type that fits. If the machine doesn't have comparisons
9351 that small, it will be converted back to the wider comparison.
9352 This helps if we are testing the sign bit of a narrower object.
9353 combine can't do this for us because it can't know whether a
9354 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
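/* Worked example (assuming the target has QImode comparisons): for
   `x & 0x80' with X an int, tree_floor_log2 gives I = 7,
   mode_for_size (8, MODE_INT, 0) gives QImode, and the jump is then done
   on (unsigned char) (x & 0x80) instead of the full-width value.  */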
9355
9356 if (! SLOW_BYTE_ACCESS
9357 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9358 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9359 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9360 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9361 && (type = type_for_mode (mode, 1)) != 0
9362 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9363 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9364 != CODE_FOR_nothing))
9365 {
9366 do_jump (convert (type, exp), if_false_label, if_true_label);
9367 break;
9368 }
9369 goto normal;
9370
9371 case TRUTH_NOT_EXPR:
9372 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9373 break;
9374
9375 case TRUTH_ANDIF_EXPR:
9376 if (if_false_label == 0)
9377 if_false_label = drop_through_label = gen_label_rtx ();
9378 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9379 start_cleanup_deferral ();
9380 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9381 end_cleanup_deferral ();
9382 break;
9383
9384 case TRUTH_ORIF_EXPR:
9385 if (if_true_label == 0)
9386 if_true_label = drop_through_label = gen_label_rtx ();
9387 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9388 start_cleanup_deferral ();
9389 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9390 end_cleanup_deferral ();
9391 break;
9392
9393 case COMPOUND_EXPR:
9394 push_temp_slots ();
9395 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9396 preserve_temp_slots (NULL_RTX);
9397 free_temp_slots ();
9398 pop_temp_slots ();
9399 emit_queue ();
9400 do_pending_stack_adjust ();
9401 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9402 break;
9403
9404 case COMPONENT_REF:
9405 case BIT_FIELD_REF:
9406 case ARRAY_REF:
9407 {
9408 HOST_WIDE_INT bitsize, bitpos;
9409 int unsignedp;
9410 enum machine_mode mode;
9411 tree type;
9412 tree offset;
9413 int volatilep = 0;
9414 unsigned int alignment;
9415
9416 /* Get description of this reference. We don't actually care
9417 about the underlying object here. */
9418 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9419 &unsignedp, &volatilep, &alignment);
9420
9421 type = type_for_size (bitsize, unsignedp);
9422 if (! SLOW_BYTE_ACCESS
9423 && type != 0 && bitsize >= 0
9424 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9425 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9426 != CODE_FOR_nothing))
9427 {
9428 do_jump (convert (type, exp), if_false_label, if_true_label);
9429 break;
9430 }
9431 goto normal;
9432 }
9433
9434 case COND_EXPR:
9435 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9436 if (integer_onep (TREE_OPERAND (exp, 1))
9437 && integer_zerop (TREE_OPERAND (exp, 2)))
9438 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9439
9440 else if (integer_zerop (TREE_OPERAND (exp, 1))
9441 && integer_onep (TREE_OPERAND (exp, 2)))
9442 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9443
9444 else
9445 {
9446 register rtx label1 = gen_label_rtx ();
9447 drop_through_label = gen_label_rtx ();
9448
9449 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9450
9451 start_cleanup_deferral ();
9452 /* Now the THEN-expression. */
9453 do_jump (TREE_OPERAND (exp, 1),
9454 if_false_label ? if_false_label : drop_through_label,
9455 if_true_label ? if_true_label : drop_through_label);
9456 /* In case the do_jump just above never jumps. */
9457 do_pending_stack_adjust ();
9458 emit_label (label1);
9459
9460 /* Now the ELSE-expression. */
9461 do_jump (TREE_OPERAND (exp, 2),
9462 if_false_label ? if_false_label : drop_through_label,
9463 if_true_label ? if_true_label : drop_through_label);
9464 end_cleanup_deferral ();
9465 }
9466 break;
9467
9468 case EQ_EXPR:
9469 {
9470 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9471
9472 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9473 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9474 {
9475 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9476 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9477 do_jump
9478 (fold
9479 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9480 fold (build (EQ_EXPR, TREE_TYPE (exp),
9481 fold (build1 (REALPART_EXPR,
9482 TREE_TYPE (inner_type),
9483 exp0)),
9484 fold (build1 (REALPART_EXPR,
9485 TREE_TYPE (inner_type),
9486 exp1)))),
9487 fold (build (EQ_EXPR, TREE_TYPE (exp),
9488 fold (build1 (IMAGPART_EXPR,
9489 TREE_TYPE (inner_type),
9490 exp0)),
9491 fold (build1 (IMAGPART_EXPR,
9492 TREE_TYPE (inner_type),
9493 exp1)))))),
9494 if_false_label, if_true_label);
9495 }
9496
9497 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9498 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9499
9500 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9501 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9502 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9503 else
9504 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9505 break;
9506 }
9507
9508 case NE_EXPR:
9509 {
9510 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9511
9512 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9513 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9514 {
9515 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9516 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9517 do_jump
9518 (fold
9519 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9520 fold (build (NE_EXPR, TREE_TYPE (exp),
9521 fold (build1 (REALPART_EXPR,
9522 TREE_TYPE (inner_type),
9523 exp0)),
9524 fold (build1 (REALPART_EXPR,
9525 TREE_TYPE (inner_type),
9526 exp1)))),
9527 fold (build (NE_EXPR, TREE_TYPE (exp),
9528 fold (build1 (IMAGPART_EXPR,
9529 TREE_TYPE (inner_type),
9530 exp0)),
9531 fold (build1 (IMAGPART_EXPR,
9532 TREE_TYPE (inner_type),
9533 exp1)))))),
9534 if_false_label, if_true_label);
9535 }
9536
9537 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9538 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9539
9540 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9541 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9542 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9543 else
9544 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9545 break;
9546 }
9547
9548 case LT_EXPR:
9549 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9550 if (GET_MODE_CLASS (mode) == MODE_INT
9551 && ! can_compare_p (LT, mode, ccp_jump))
9552 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9553 else
9554 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9555 break;
9556
9557 case LE_EXPR:
9558 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9559 if (GET_MODE_CLASS (mode) == MODE_INT
9560 && ! can_compare_p (LE, mode, ccp_jump))
9561 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9562 else
9563 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9564 break;
9565
9566 case GT_EXPR:
9567 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9568 if (GET_MODE_CLASS (mode) == MODE_INT
9569 && ! can_compare_p (GT, mode, ccp_jump))
9570 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9571 else
9572 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9573 break;
9574
9575 case GE_EXPR:
9576 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9577 if (GET_MODE_CLASS (mode) == MODE_INT
9578 && ! can_compare_p (GE, mode, ccp_jump))
9579 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9580 else
9581 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9582 break;
9583
9584 case UNORDERED_EXPR:
9585 case ORDERED_EXPR:
9586 {
9587 enum rtx_code cmp, rcmp;
9588 int do_rev;
9589
9590 if (code == UNORDERED_EXPR)
9591 cmp = UNORDERED, rcmp = ORDERED;
9592 else
9593 cmp = ORDERED, rcmp = UNORDERED;
9594 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9595
9596 do_rev = 0;
9597 if (! can_compare_p (cmp, mode, ccp_jump)
9598 && (can_compare_p (rcmp, mode, ccp_jump)
9599 /* If the target doesn't provide either UNORDERED or ORDERED
9600 comparisons, canonicalize on UNORDERED for the library. */
9601 || rcmp == UNORDERED))
9602 do_rev = 1;
9603
9604 if (! do_rev)
9605 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9606 else
9607 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9608 }
9609 break;
9610
9611 {
9612 enum rtx_code rcode1;
9613 enum tree_code tcode2;
9614
9615 case UNLT_EXPR:
9616 rcode1 = UNLT;
9617 tcode2 = LT_EXPR;
9618 goto unordered_bcc;
9619 case UNLE_EXPR:
9620 rcode1 = UNLE;
9621 tcode2 = LE_EXPR;
9622 goto unordered_bcc;
9623 case UNGT_EXPR:
9624 rcode1 = UNGT;
9625 tcode2 = GT_EXPR;
9626 goto unordered_bcc;
9627 case UNGE_EXPR:
9628 rcode1 = UNGE;
9629 tcode2 = GE_EXPR;
9630 goto unordered_bcc;
9631 case UNEQ_EXPR:
9632 rcode1 = UNEQ;
9633 tcode2 = EQ_EXPR;
9634 goto unordered_bcc;
9635
9636 unordered_bcc:
9637 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9638 if (can_compare_p (rcode1, mode, ccp_jump))
9639 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9640 if_true_label);
9641 else
9642 {
9643 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9644 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9645 tree cmp0, cmp1;
9646
9647 /* If the target doesn't support combined unordered
9648 compares, decompose into UNORDERED + comparison. */
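/* E.g. UNLT_EXPR becomes UNORDERED (op0, op1) || LT (op0, op1).  */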
9649 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9650 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9651 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9652 do_jump (exp, if_false_label, if_true_label);
9653 }
9654 }
9655 break;
9656
9657 default:
9658 normal:
9659 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9660 #if 0
9661 /* This is not needed any more and causes poor code since it causes
9662 comparisons and tests from non-SI objects to have different code
9663 sequences. */
9664 /* Copy to register to avoid generating bad insns by cse
9665 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9666 if (!cse_not_expected && GET_CODE (temp) == MEM)
9667 temp = copy_to_reg (temp);
9668 #endif
9669 do_pending_stack_adjust ();
9670 /* Do any postincrements in the expression that was tested. */
9671 emit_queue ();
9672
9673 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9674 {
9675 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9676 if (target)
9677 emit_jump (target);
9678 }
9679 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9680 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9681 /* Note swapping the labels gives us not-equal. */
9682 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9683 else if (GET_MODE (temp) != VOIDmode)
9684 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9685 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9686 GET_MODE (temp), NULL_RTX, 0,
9687 if_false_label, if_true_label);
9688 else
9689 abort ();
9690 }
9691
9692 if (drop_through_label)
9693 {
9694 /* If do_jump produces code that might be jumped around,
9695 do any stack adjusts from that code, before the place
9696 where control merges in. */
9697 do_pending_stack_adjust ();
9698 emit_label (drop_through_label);
9699 }
9700 }
9701 \f
9702 /* Given a comparison expression EXP for values too wide to be compared
9703 with one insn, test the comparison and jump to the appropriate label.
9704 The code of EXP is ignored; we always test GT if SWAP is 0,
9705 and LT if SWAP is 1. */
9706
9707 static void
9708 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9709 tree exp;
9710 int swap;
9711 rtx if_false_label, if_true_label;
9712 {
9713 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9714 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9715 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9716 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9717
9718 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9719 }
9720
9721 /* Compare OP0 with OP1, word at a time, in mode MODE.
9722 UNSIGNEDP says to do unsigned comparison.
9723 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
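/* For instance, comparing two DImode values on a 32-bit target gives
   NWORDS = 2: the high-order words are compared first (with the requested
   signedness), and the low-order words are compared, unsigned, only when
   the high-order words are equal.  */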
9724
9725 void
9726 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9727 enum machine_mode mode;
9728 int unsignedp;
9729 rtx op0, op1;
9730 rtx if_false_label, if_true_label;
9731 {
9732 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9733 rtx drop_through_label = 0;
9734 int i;
9735
9736 if (! if_true_label || ! if_false_label)
9737 drop_through_label = gen_label_rtx ();
9738 if (! if_true_label)
9739 if_true_label = drop_through_label;
9740 if (! if_false_label)
9741 if_false_label = drop_through_label;
9742
9743 /* Compare a word at a time, high order first. */
9744 for (i = 0; i < nwords; i++)
9745 {
9746 rtx op0_word, op1_word;
9747
9748 if (WORDS_BIG_ENDIAN)
9749 {
9750 op0_word = operand_subword_force (op0, i, mode);
9751 op1_word = operand_subword_force (op1, i, mode);
9752 }
9753 else
9754 {
9755 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9756 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9757 }
9758
9759 /* All but the high-order word must be compared as unsigned. */
9760 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9761 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9762 NULL_RTX, if_true_label);
9763
9764 /* Consider lower words only if these are equal. */
9765 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9766 NULL_RTX, 0, NULL_RTX, if_false_label);
9767 }
9768
9769 if (if_false_label)
9770 emit_jump (if_false_label);
9771 if (drop_through_label)
9772 emit_label (drop_through_label);
9773 }
9774
9775 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9776 with one insn, test the comparison and jump to the appropriate label. */
9777
9778 static void
9779 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9780 tree exp;
9781 rtx if_false_label, if_true_label;
9782 {
9783 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9784 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9785 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9786 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9787 int i;
9788 rtx drop_through_label = 0;
9789
9790 if (! if_false_label)
9791 drop_through_label = if_false_label = gen_label_rtx ();
9792
9793 for (i = 0; i < nwords; i++)
9794 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9795 operand_subword_force (op1, i, mode),
9796 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9797 word_mode, NULL_RTX, 0, if_false_label,
9798 NULL_RTX);
9799
9800 if (if_true_label)
9801 emit_jump (if_true_label);
9802 if (drop_through_label)
9803 emit_label (drop_through_label);
9804 }
9805 \f
9806 /* Jump according to whether OP0 is 0.
9807 We assume that OP0 has an integer mode that is too wide
9808 for the available compare insns. */
9809
9810 void
9811 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9812 rtx op0;
9813 rtx if_false_label, if_true_label;
9814 {
9815 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9816 rtx part;
9817 int i;
9818 rtx drop_through_label = 0;
9819
9820 /* The fastest way of doing this comparison on almost any machine is to
9821 "or" all the words and compare the result. If all have to be loaded
9822 from memory and this is a very wide item, it's possible this may
9823 be slower, but that's highly unlikely. */
9824
9825 part = gen_reg_rtx (word_mode);
9826 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9827 for (i = 1; i < nwords && part != 0; i++)
9828 part = expand_binop (word_mode, ior_optab, part,
9829 operand_subword_force (op0, i, GET_MODE (op0)),
9830 part, 1, OPTAB_WIDEN);
9831
9832 if (part != 0)
9833 {
9834 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9835 NULL_RTX, 0, if_false_label, if_true_label);
9836
9837 return;
9838 }
9839
9840 /* If we couldn't do the "or" simply, do this with a series of compares. */
9841 if (! if_false_label)
9842 drop_through_label = if_false_label = gen_label_rtx ();
9843
9844 for (i = 0; i < nwords; i++)
9845 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9846 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9847 if_false_label, NULL_RTX);
9848
9849 if (if_true_label)
9850 emit_jump (if_true_label);
9851
9852 if (drop_through_label)
9853 emit_label (drop_through_label);
9854 }
9855 \f
9856 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9857 (including code to compute the values to be compared)
9858 and set (CC0) according to the result.
9859 The decision as to signed or unsigned comparison must be made by the caller.
9860
9861 We force a stack adjustment unless there are currently
9862 things pushed on the stack that aren't yet used.
9863
9864 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9865 compared.
9866
9867 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9868 size of MODE should be used. */
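/* The value returned is a comparison against cc0, e.g. for CODE == GT
   something like

       (gt (cc0) (const_int 0))

   unless both operands are constants, in which case the folded constant
   result is returned directly.  */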
9869
9870 rtx
9871 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9872 register rtx op0, op1;
9873 enum rtx_code code;
9874 int unsignedp;
9875 enum machine_mode mode;
9876 rtx size;
9877 unsigned int align;
9878 {
9879 rtx tem;
9880
9881 /* If one operand is constant, make it the second one. Only do this
9882 if the other operand is not constant as well. */
9883
9884 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9885 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9886 {
9887 tem = op0;
9888 op0 = op1;
9889 op1 = tem;
9890 code = swap_condition (code);
9891 }
9892
9893 if (flag_force_mem)
9894 {
9895 op0 = force_not_mem (op0);
9896 op1 = force_not_mem (op1);
9897 }
9898
9899 do_pending_stack_adjust ();
9900
9901 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9902 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9903 return tem;
9904
9905 #if 0
9906 /* There's no need to do this now that combine.c can eliminate lots of
9907 sign extensions. This can be less efficient in certain cases on other
9908 machines. */
9909
9910 /* If this is a signed equality comparison, we can do it as an
9911 unsigned comparison since zero-extension is cheaper than sign
9912 extension and comparisons with zero are done as unsigned. This is
9913 the case even on machines that can do fast sign extension, since
9914 zero-extension is easier to combine with other operations than
9915 sign-extension is. If we are comparing against a constant, we must
9916 convert it to what it would look like unsigned. */
9917 if ((code == EQ || code == NE) && ! unsignedp
9918 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9919 {
9920 if (GET_CODE (op1) == CONST_INT
9921 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9922 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9923 unsignedp = 1;
9924 }
9925 #endif
9926
9927 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9928
9929 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9930 }
9931
9932 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9933 The decision as to signed or unsigned comparison must be made by the caller.
9934
9935 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9936 compared.
9937
9938 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9939 size of MODE should be used. */
9940
9941 void
9942 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9943 if_false_label, if_true_label)
9944 register rtx op0, op1;
9945 enum rtx_code code;
9946 int unsignedp;
9947 enum machine_mode mode;
9948 rtx size;
9949 unsigned int align;
9950 rtx if_false_label, if_true_label;
9951 {
9952 rtx tem;
9953 int dummy_true_label = 0;
9954
9955 /* Reverse the comparison if that is safe and we want to jump if it is
9956 false. */
9957 if (! if_true_label && ! FLOAT_MODE_P (mode))
9958 {
9959 if_true_label = if_false_label;
9960 if_false_label = 0;
9961 code = reverse_condition (code);
9962 }
9963
9964 /* If one operand is constant, make it the second one. Only do this
9965 if the other operand is not constant as well. */
9966
9967 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9968 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9969 {
9970 tem = op0;
9971 op0 = op1;
9972 op1 = tem;
9973 code = swap_condition (code);
9974 }
9975
9976 if (flag_force_mem)
9977 {
9978 op0 = force_not_mem (op0);
9979 op1 = force_not_mem (op1);
9980 }
9981
9982 do_pending_stack_adjust ();
9983
9984 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9985 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9986 {
9987 if (tem == const_true_rtx)
9988 {
9989 if (if_true_label)
9990 emit_jump (if_true_label);
9991 }
9992 else
9993 {
9994 if (if_false_label)
9995 emit_jump (if_false_label);
9996 }
9997 return;
9998 }
9999
10000 #if 0
10001 /* There's no need to do this now that combine.c can eliminate lots of
10002 sign extensions. This can be less efficient in certain cases on other
10003 machines. */
10004
10005 /* If this is a signed equality comparison, we can do it as an
10006 unsigned comparison since zero-extension is cheaper than sign
10007 extension and comparisons with zero are done as unsigned. This is
10008 the case even on machines that can do fast sign extension, since
10009 zero-extension is easier to combine with other operations than
10010 sign-extension is. If we are comparing against a constant, we must
10011 convert it to what it would look like unsigned. */
10012 if ((code == EQ || code == NE) && ! unsignedp
10013 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10014 {
10015 if (GET_CODE (op1) == CONST_INT
10016 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10017 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10018 unsignedp = 1;
10019 }
10020 #endif
10021
10022 if (! if_true_label)
10023 {
10024 dummy_true_label = 1;
10025 if_true_label = gen_label_rtx ();
10026 }
10027
10028 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10029 if_true_label);
10030
10031 if (if_false_label)
10032 emit_jump (if_false_label);
10033 if (dummy_true_label)
10034 emit_label (if_true_label);
10035 }
10036
10037 /* Generate code for a comparison expression EXP (including code to compute
10038 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10039 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10040 generated code will drop through.
10041 SIGNED_CODE should be the rtx operation for this comparison for
10042 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10043
10044 We force a stack adjustment unless there are currently
10045 things pushed on the stack that aren't yet used. */
10046
10047 static void
10048 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10049 if_true_label)
10050 register tree exp;
10051 enum rtx_code signed_code, unsigned_code;
10052 rtx if_false_label, if_true_label;
10053 {
10054 unsigned int align0, align1;
10055 register rtx op0, op1;
10056 register tree type;
10057 register enum machine_mode mode;
10058 int unsignedp;
10059 enum rtx_code code;
10060
10061 /* Don't crash if the comparison was erroneous. */
10062 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10063 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10064 return;
10065
10066 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10067 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10068 mode = TYPE_MODE (type);
10069 unsignedp = TREE_UNSIGNED (type);
10070 code = unsignedp ? unsigned_code : signed_code;
10071
10072 #ifdef HAVE_canonicalize_funcptr_for_compare
10073 /* If function pointers need to be "canonicalized" before they can
10074 be reliably compared, then canonicalize them. */
10075 if (HAVE_canonicalize_funcptr_for_compare
10076 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10077 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10078 == FUNCTION_TYPE))
10079 {
10080 rtx new_op0 = gen_reg_rtx (mode);
10081
10082 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10083 op0 = new_op0;
10084 }
10085
10086 if (HAVE_canonicalize_funcptr_for_compare
10087 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10088 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10089 == FUNCTION_TYPE))
10090 {
10091 rtx new_op1 = gen_reg_rtx (mode);
10092
10093 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10094 op1 = new_op1;
10095 }
10096 #endif
10097
10098 /* Do any postincrements in the expression that was tested. */
10099 emit_queue ();
10100
10101 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10102 ((mode == BLKmode)
10103 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10104 MIN (align0, align1),
10105 if_false_label, if_true_label);
10106 }
10107 \f
10108 /* Generate code to calculate EXP using a store-flag instruction
10109 and return an rtx for the result. EXP is either a comparison
10110 or a TRUTH_NOT_EXPR whose operand is a comparison.
10111
10112 If TARGET is nonzero, store the result there if convenient.
10113
10114 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10115 cheap.
10116
10117 Return zero if there is no suitable set-flag instruction
10118 available on this machine.
10119
10120 Once expand_expr has been called on the arguments of the comparison,
10121 we are committed to doing the store flag, since it is not safe to
10122 re-evaluate the expression. We emit the store-flag insn by calling
10123 emit_store_flag, but only expand the arguments if we have a reason
10124 to believe that emit_store_flag will be successful. If we think that
10125 it will, but it isn't, we have to simulate the store-flag with a
10126 set/jump/set sequence. */
10127
10128 static rtx
10129 do_store_flag (exp, target, mode, only_cheap)
10130 tree exp;
10131 rtx target;
10132 enum machine_mode mode;
10133 int only_cheap;
10134 {
10135 enum rtx_code code;
10136 tree arg0, arg1, type;
10137 tree tem;
10138 enum machine_mode operand_mode;
10139 int invert = 0;
10140 int unsignedp;
10141 rtx op0, op1;
10142 enum insn_code icode;
10143 rtx subtarget = target;
10144 rtx result, label;
10145
10146 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10147 result at the end. We can't simply invert the test since it would
10148 have already been inverted if it were valid. This case occurs for
10149 some floating-point comparisons. */
10150
10151 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10152 invert = 1, exp = TREE_OPERAND (exp, 0);
10153
10154 arg0 = TREE_OPERAND (exp, 0);
10155 arg1 = TREE_OPERAND (exp, 1);
10156 type = TREE_TYPE (arg0);
10157 operand_mode = TYPE_MODE (type);
10158 unsignedp = TREE_UNSIGNED (type);
10159
10160 /* We won't bother with BLKmode store-flag operations because it would mean
10161 passing a lot of information to emit_store_flag. */
10162 if (operand_mode == BLKmode)
10163 return 0;
10164
10165 /* We won't bother with store-flag operations involving function pointers
10166 when function pointers must be canonicalized before comparisons. */
10167 #ifdef HAVE_canonicalize_funcptr_for_compare
10168 if (HAVE_canonicalize_funcptr_for_compare
10169 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10170 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10171 == FUNCTION_TYPE))
10172 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10173 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10174 == FUNCTION_TYPE))))
10175 return 0;
10176 #endif
10177
10178 STRIP_NOPS (arg0);
10179 STRIP_NOPS (arg1);
10180
10181 /* Get the rtx comparison code to use. We know that EXP is a comparison
10182 operation of some type. Some comparisons against 1 and -1 can be
10183 converted to comparisons with zero. Do so here so that the tests
10184 below will be aware that we have a comparison with zero. These
10185 tests will not catch constants in the first operand, but constants
10186 are rarely passed as the first operand. */
10187
10188 switch (TREE_CODE (exp))
10189 {
10190 case EQ_EXPR:
10191 code = EQ;
10192 break;
10193 case NE_EXPR:
10194 code = NE;
10195 break;
10196 case LT_EXPR:
10197 if (integer_onep (arg1))
10198 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10199 else
10200 code = unsignedp ? LTU : LT;
10201 break;
10202 case LE_EXPR:
10203 if (! unsignedp && integer_all_onesp (arg1))
10204 arg1 = integer_zero_node, code = LT;
10205 else
10206 code = unsignedp ? LEU : LE;
10207 break;
10208 case GT_EXPR:
10209 if (! unsignedp && integer_all_onesp (arg1))
10210 arg1 = integer_zero_node, code = GE;
10211 else
10212 code = unsignedp ? GTU : GT;
10213 break;
10214 case GE_EXPR:
10215 if (integer_onep (arg1))
10216 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10217 else
10218 code = unsignedp ? GEU : GE;
10219 break;
10220
10221 case UNORDERED_EXPR:
10222 code = UNORDERED;
10223 break;
10224 case ORDERED_EXPR:
10225 code = ORDERED;
10226 break;
10227 case UNLT_EXPR:
10228 code = UNLT;
10229 break;
10230 case UNLE_EXPR:
10231 code = UNLE;
10232 break;
10233 case UNGT_EXPR:
10234 code = UNGT;
10235 break;
10236 case UNGE_EXPR:
10237 code = UNGE;
10238 break;
10239 case UNEQ_EXPR:
10240 code = UNEQ;
10241 break;
10242
10243 default:
10244 abort ();
10245 }
10246
10247 /* Put a constant second. */
10248 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10249 {
10250 tem = arg0; arg0 = arg1; arg1 = tem;
10251 code = swap_condition (code);
10252 }
10253
10254 /* If this is an equality or inequality test of a single bit, we can
10255 do this by shifting the bit being tested to the low-order bit and
10256 masking the result with the constant 1. If the condition was EQ,
10257 we xor it with 1. This does not require an scc insn and is faster
10258 than an scc insn even if we have it. */
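/* Worked example: for `(x & 4) != 0' we have BITNUM = 2 and compute
   roughly

       op0 = (x >> 2) & 1;

   while for `(x & 4) == 0' the shifted value is additionally XORed with 1.
   The AND is omitted entirely when the bit tested is the sign bit.  */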
10259
10260 if ((code == NE || code == EQ)
10261 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10262 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10263 {
10264 tree inner = TREE_OPERAND (arg0, 0);
10265 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10266 int ops_unsignedp;
10267
10268 /* If INNER is a right shift by a constant and it plus BITNUM does
10269 not overflow, adjust BITNUM and INNER. */
10270
10271 if (TREE_CODE (inner) == RSHIFT_EXPR
10272 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10273 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10274 && bitnum < TYPE_PRECISION (type)
10275 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10276 TYPE_PRECISION (type) - bitnum))
10277 {
10278 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10279 inner = TREE_OPERAND (inner, 0);
10280 }
10281
10282 /* If we are going to be able to omit the AND below, we must do our
10283 operations as unsigned. If we must use the AND, we have a choice.
10284 Normally unsigned is faster, but for some machines signed is. */
10285 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10286 #ifdef LOAD_EXTEND_OP
10287 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10288 #else
10289 : 1
10290 #endif
10291 );
10292
10293 if (! get_subtarget (subtarget)
10294 || GET_MODE (subtarget) != operand_mode
10295 || ! safe_from_p (subtarget, inner, 1))
10296 subtarget = 0;
10297
10298 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10299
10300 if (bitnum != 0)
10301 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10302 size_int (bitnum), subtarget, ops_unsignedp);
10303
10304 if (GET_MODE (op0) != mode)
10305 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10306
10307 if ((code == EQ && ! invert) || (code == NE && invert))
10308 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10309 ops_unsignedp, OPTAB_LIB_WIDEN);
10310
10311 /* Put the AND last so it can combine with more things. */
10312 if (bitnum != TYPE_PRECISION (type) - 1)
10313 op0 = expand_and (op0, const1_rtx, subtarget);
10314
10315 return op0;
10316 }
10317
10318 /* Now see if we are likely to be able to do this. Return if not. */
10319 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10320 return 0;
10321
10322 icode = setcc_gen_code[(int) code];
10323 if (icode == CODE_FOR_nothing
10324 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10325 {
10326 /* We can only do this if it is one of the special cases that
10327 can be handled without an scc insn. */
10328 if ((code == LT && integer_zerop (arg1))
10329 || (! only_cheap && code == GE && integer_zerop (arg1)))
10330 ;
10331 else if (BRANCH_COST >= 0
10332 && ! only_cheap && (code == NE || code == EQ)
10333 && TREE_CODE (type) != REAL_TYPE
10334 && ((abs_optab->handlers[(int) operand_mode].insn_code
10335 != CODE_FOR_nothing)
10336 || (ffs_optab->handlers[(int) operand_mode].insn_code
10337 != CODE_FOR_nothing)))
10338 ;
10339 else
10340 return 0;
10341 }
10342
10343 preexpand_calls (exp);
10344 if (! get_subtarget (target)
10345 || GET_MODE (subtarget) != operand_mode
10346 || ! safe_from_p (subtarget, arg1, 1))
10347 subtarget = 0;
10348
10349 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10350 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10351
10352 if (target == 0)
10353 target = gen_reg_rtx (mode);
10354
10355 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10356 because, if emit_store_flag emits anything at all, it will succeed and
10357 OP0 and OP1 will not be used subsequently. */
10358
10359 result = emit_store_flag (target, code,
10360 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10361 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10362 operand_mode, unsignedp, 1);
10363
10364 if (result)
10365 {
10366 if (invert)
10367 result = expand_binop (mode, xor_optab, result, const1_rtx,
10368 result, 0, OPTAB_LIB_WIDEN);
10369 return result;
10370 }
10371
10372 /* If this failed, we have to do this with set/compare/jump/set code. */
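/* That is, roughly: TARGET = 1; if (OP0 <code> OP1) goto label;
   TARGET = 0; label: ... (with the two constants swapped when
   INVERT is set). */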
10373 if (GET_CODE (target) != REG
10374 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10375 target = gen_reg_rtx (GET_MODE (target));
10376
10377 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10378 result = compare_from_rtx (op0, op1, code, unsignedp,
10379 operand_mode, NULL_RTX, 0);
10380 if (GET_CODE (result) == CONST_INT)
10381 return (((result == const0_rtx && ! invert)
10382 || (result != const0_rtx && invert))
10383 ? const0_rtx : const1_rtx);
10384
10385 label = gen_label_rtx ();
10386 if (bcc_gen_fctn[(int) code] == 0)
10387 abort ();
10388
10389 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10390 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10391 emit_label (label);
10392
10393 return target;
10394 }
10395 \f
10396 /* Generate a tablejump instruction (used for switch statements). */
10397
10398 #ifdef HAVE_tablejump
10399
10400 /* INDEX is the value being switched on, with the lowest value
10401 in the table already subtracted.
10402 MODE is its expected mode (needed if INDEX is constant).
10403 RANGE is the highest valid index into the jump table,
i.e. the number of table entries minus one.
10404 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10405
10406 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10407 index value is out of range. */
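/* Roughly, the code emitted below amounts to:

   if ((unsigned) INDEX > RANGE) goto DEFAULT_LABEL;
   goto *(TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE));

   with the table entry fetched in CASE_VECTOR_MODE and handed to the
   target's tablejump pattern, and with the address run through
   PIC_CASE_VECTOR_ADDRESS when generating PIC code. */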
10408
10409 void
10410 do_tablejump (index, mode, range, table_label, default_label)
10411 rtx index, range, table_label, default_label;
10412 enum machine_mode mode;
10413 {
10414 register rtx temp, vector;
10415
10416 /* Do an unsigned comparison (in the proper mode) between the index
10417 expression and the value which represents the length of the range.
10418 Since we just finished subtracting the lower bound of the range
10419 from the index expression, this comparison allows us to simultaneously
10420 check that the original index expression value is both greater than
10421 or equal to the minimum value of the range and less than or equal to
10422 the maximum value of the range. */
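/* For example (illustrative): for a switch whose case values run from
   3 to 7, the caller passes INDEX = i - 3 and RANGE = 4. If i < 3 the
   subtraction wraps around to a very large unsigned value, so the
   single unsigned GTU comparison below sends both i < 3 and i > 7 to
   DEFAULT_LABEL. */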
10423
10424 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10425 0, default_label);
10426
10427 /* If index is in range, it must fit in Pmode.
10428 Convert to Pmode so we can index with it. */
10429 if (mode != Pmode)
10430 index = convert_to_mode (Pmode, index, 1);
10431
10432 /* Don't let a MEM slip through, because then the INDEX that comes
10433 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10434 and break_out_memory_refs will go to work on it and mess it up. */
10435 #ifdef PIC_CASE_VECTOR_ADDRESS
10436 if (flag_pic && GET_CODE (index) != REG)
10437 index = copy_to_mode_reg (Pmode, index);
10438 #endif
10439
10440 /* If flag_force_addr were to affect this address
10441 it could interfere with the tricky assumptions made
10442 about addresses that contain label-refs,
10443 which may be valid only very near the tablejump itself. */
10444 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10445 GET_MODE_SIZE, because this indicates how large insns are. The other
10446 uses should all be Pmode, because they are addresses. This code
10447 could fail if addresses and insns are not the same size. */
10448 index = gen_rtx_PLUS (Pmode,
10449 gen_rtx_MULT (Pmode, index,
10450 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10451 gen_rtx_LABEL_REF (Pmode, table_label));
10452 #ifdef PIC_CASE_VECTOR_ADDRESS
10453 if (flag_pic)
10454 index = PIC_CASE_VECTOR_ADDRESS (index);
10455 else
10456 #endif
10457 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10458 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10459 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10460 RTX_UNCHANGING_P (vector) = 1;
10461 convert_move (temp, vector, 0);
10462
10463 emit_jump_insn (gen_tablejump (temp, table_label));
10464
10465 /* If we are generating PIC code or if the table is PC-relative, the
10466 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10467 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10468 emit_barrier ();
10469 }
10470
10471 #endif /* HAVE_tablejump */