1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
47
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
50 #endif
51
52 /* Supply a default definition for PUSH_ARGS. */
53 #ifndef PUSH_ARGS
54 #ifdef PUSH_ROUNDING
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
56 #else
57 #define PUSH_ARGS 0
58 #endif
59 #endif
60
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
63
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
66
67 #ifdef PUSH_ROUNDING
68
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
72
73 #endif
74
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
82
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
86 #endif
87
88 /* If this is nonzero, we do not bother generating VOLATILE
89 around volatile memory references, and we are willing to
90 output indirect addresses. If cse is to follow, we reject
91 indirect addresses so a useful potential cse is generated;
92 if it is used only once, instruction combination will produce
93 the same indirect address eventually. */
94 int cse_not_expected;
95
96 /* Nonzero to generate code for all the subroutines within an
97 expression before generating the upper levels of the expression.
98 Nowadays this is never zero. */
99 int do_preexpand_calls = 1;
100
101 /* Don't check memory usage, since the code being emitted is itself checking
102 memory usage. Used when current_function_check_memory_usage is true, to avoid
103 infinite recursion. */
104 static int in_check_memory_usage;
105
106 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
107 static tree placeholder_list = 0;
108
109 /* This structure is used by move_by_pieces to describe the move to
110 be performed. */
111 struct move_by_pieces
112 {
113 rtx to;
114 rtx to_addr;
115 int autinc_to;
116 int explicit_inc_to;
117 rtx from;
118 rtx from_addr;
119 int autinc_from;
120 int explicit_inc_from;
121 unsigned HOST_WIDE_INT len;
122 HOST_WIDE_INT offset;
123 int reverse;
124 };
125
126 /* This structure is used by clear_by_pieces to describe the clear to
127 be performed. */
128
129 struct clear_by_pieces
130 {
131 rtx to;
132 rtx to_addr;
133 int autinc_to;
134 int explicit_inc_to;
135 unsigned HOST_WIDE_INT len;
136 HOST_WIDE_INT offset;
137 int reverse;
138 };
139
140 extern struct obstack permanent_obstack;
141
142 static rtx get_push_address PARAMS ((int));
143
144 static rtx enqueue_insn PARAMS ((rtx, rtx));
145 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
146 PARAMS ((unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
149 struct move_by_pieces *));
150 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
151 unsigned int));
152 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
153 enum machine_mode,
154 struct clear_by_pieces *));
155 static rtx get_subtarget PARAMS ((rtx));
156 static int is_zeros_p PARAMS ((tree));
157 static int mostly_zeros_p PARAMS ((tree));
158 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, tree, unsigned int, int));
161 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
162 HOST_WIDE_INT));
163 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
164 HOST_WIDE_INT, enum machine_mode,
165 tree, enum machine_mode, int,
166 unsigned int, HOST_WIDE_INT, int));
167 static enum memory_use_mode
168 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
169 static tree save_noncopied_parts PARAMS ((tree, tree));
170 static tree init_noncopied_parts PARAMS ((tree, tree));
171 static int safe_from_p PARAMS ((rtx, tree, int));
172 static int fixed_type_p PARAMS ((tree));
173 static rtx var_rtx PARAMS ((tree));
174 static int readonly_fields_p PARAMS ((tree));
175 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
176 static rtx expand_increment PARAMS ((tree, int, int));
177 static void preexpand_calls PARAMS ((tree));
178 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
179 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
180 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
181 rtx, rtx));
182 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
183
184 /* Record for each mode whether we can move a register directly to or
185 from an object of that mode in memory. If we can't, we won't try
186 to use that mode directly when accessing a field of that mode. */
187
188 static char direct_load[NUM_MACHINE_MODES];
189 static char direct_store[NUM_MACHINE_MODES];
190
191 /* If a memory-to-memory move would take MOVE_RATIO or more simple
192 move-instruction sequences, we will do a movstr or libcall instead. */
193
194 #ifndef MOVE_RATIO
195 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
196 #define MOVE_RATIO 2
197 #else
198 /* If we are optimizing for space (-Os), cut down the default move ratio. */
199 #define MOVE_RATIO (optimize_size ? 3 : 15)
200 #endif
201 #endif
202
203 /* This macro is used to determine whether move_by_pieces should be called
204 to perform a structure copy. */
205 #ifndef MOVE_BY_PIECES_P
206 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
207 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
208 #endif
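/* Illustrative example (not part of the original source): on a target with
   no movstrM patterns and not optimizing for size, MOVE_RATIO is 15, so on
   a 32-bit machine an aligned 8-byte copy costs
   move_by_pieces_ninsns (8, 32) == 2 simple moves and is therefore expanded
   inline by move_by_pieces rather than through a libcall.  */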
209
210 /* This array records the insn_code of insns to perform block moves. */
211 enum insn_code movstr_optab[NUM_MACHINE_MODES];
212
213 /* This array records the insn_code of insns to perform block clears. */
214 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
215
216 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
217
218 #ifndef SLOW_UNALIGNED_ACCESS
219 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
220 #endif
221 \f
222 /* This is run once per compilation to set up which modes can be used
223 directly in memory and to initialize the block move optab. */
224
225 void
226 init_expr_once ()
227 {
228 rtx insn, pat;
229 enum machine_mode mode;
230 int num_clobbers;
231 rtx mem, mem1;
232 char *free_point;
233
234 start_sequence ();
235
236 /* Since we are on the permanent obstack, we must be sure we save this
237 spot AFTER we call start_sequence, since it will reuse the rtl it
238 makes. */
239 free_point = (char *) oballoc (0);
240
241 /* Try indexing by frame ptr and try by stack ptr.
242 It is known that on the Convex the stack ptr isn't a valid index.
243 With luck, one or the other is valid on any machine. */
244 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
245 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
246
247 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
248 pat = PATTERN (insn);
249
250 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
251 mode = (enum machine_mode) ((int) mode + 1))
252 {
253 int regno;
254 rtx reg;
255
256 direct_load[(int) mode] = direct_store[(int) mode] = 0;
257 PUT_MODE (mem, mode);
258 PUT_MODE (mem1, mode);
259
260 /* See if there is some register that can be used in this mode and
261 directly loaded or stored from memory. */
262
263 if (mode != VOIDmode && mode != BLKmode)
264 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
265 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
266 regno++)
267 {
268 if (! HARD_REGNO_MODE_OK (regno, mode))
269 continue;
270
271 reg = gen_rtx_REG (mode, regno);
272
273 SET_SRC (pat) = mem;
274 SET_DEST (pat) = reg;
275 if (recog (pat, insn, &num_clobbers) >= 0)
276 direct_load[(int) mode] = 1;
277
278 SET_SRC (pat) = mem1;
279 SET_DEST (pat) = reg;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_load[(int) mode] = 1;
282
283 SET_SRC (pat) = reg;
284 SET_DEST (pat) = mem;
285 if (recog (pat, insn, &num_clobbers) >= 0)
286 direct_store[(int) mode] = 1;
287
288 SET_SRC (pat) = reg;
289 SET_DEST (pat) = mem1;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_store[(int) mode] = 1;
292 }
293 }
294
295 end_sequence ();
296 obfree (free_point);
297 }
298
299 /* This is run at the start of compiling a function. */
300
301 void
302 init_expr ()
303 {
304 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
305
306 pending_chain = 0;
307 pending_stack_adjust = 0;
308 stack_pointer_delta = 0;
309 inhibit_defer_pop = 0;
310 saveregs_value = 0;
311 apply_args_value = 0;
312 forced_labels = 0;
313 }
314
315 void
316 mark_expr_status (p)
317 struct expr_status *p;
318 {
319 if (p == NULL)
320 return;
321
322 ggc_mark_rtx (p->x_saveregs_value);
323 ggc_mark_rtx (p->x_apply_args_value);
324 ggc_mark_rtx (p->x_forced_labels);
325 }
326
327 void
328 free_expr_status (f)
329 struct function *f;
330 {
331 free (f->expr);
332 f->expr = NULL;
333 }
334
335 /* Small sanity check that the queue is empty at the end of a function. */
336
337 void
338 finish_expr_for_function ()
339 {
340 if (pending_chain)
341 abort ();
342 }
343 \f
344 /* Manage the queue of increment instructions to be output
345 for POSTINCREMENT_EXPR expressions, etc. */
346
347 /* Queue up to increment (or change) VAR later. BODY says how:
348 BODY should be the same thing you would pass to emit_insn
349 to increment right away. It will go to emit_insn later on.
350
351 The value is a QUEUED expression to be used in place of VAR
352 where you want to guarantee the pre-incrementation value of VAR. */
353
354 static rtx
355 enqueue_insn (var, body)
356 rtx var, body;
357 {
358 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
359 body, pending_chain);
360 return pending_chain;
361 }
362
363 /* Use protect_from_queue to convert a QUEUED expression
364 into something that you can put immediately into an instruction.
365 If the queued incrementation has not happened yet,
366 protect_from_queue returns the variable itself.
367 If the incrementation has happened, protect_from_queue returns a temp
368 that contains a copy of the old value of the variable.
369
370 Any time an rtx which might possibly be a QUEUED is to be put
371 into an instruction, it must be passed through protect_from_queue first.
372 QUEUED expressions are not meaningful in instructions.
373
374 Do not pass a value through protect_from_queue and then hold
375 on to it for a while before putting it in an instruction!
376 If the queue is flushed in between, incorrect code will result. */
377
378 rtx
379 protect_from_queue (x, modify)
380 register rtx x;
381 int modify;
382 {
383 register RTX_CODE code = GET_CODE (x);
384
385 #if 0 /* A QUEUED can hang around after the queue is forced out. */
386 /* Shortcut for most common case. */
387 if (pending_chain == 0)
388 return x;
389 #endif
390
391 if (code != QUEUED)
392 {
393 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
394 use of autoincrement. Make a copy of the contents of the memory
395 location rather than a copy of the address, but not if the value is
396 of mode BLKmode. Don't modify X in place since it might be
397 shared. */
398 if (code == MEM && GET_MODE (x) != BLKmode
399 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
400 {
401 register rtx y = XEXP (x, 0);
402 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
403
404 MEM_COPY_ATTRIBUTES (new, x);
405
406 if (QUEUED_INSN (y))
407 {
408 register rtx temp = gen_reg_rtx (GET_MODE (new));
409 emit_insn_before (gen_move_insn (temp, new),
410 QUEUED_INSN (y));
411 return temp;
412 }
413 return new;
414 }
415 /* Otherwise, recursively protect the subexpressions of all
416 the kinds of rtx's that can contain a QUEUED. */
417 if (code == MEM)
418 {
419 rtx tem = protect_from_queue (XEXP (x, 0), 0);
420 if (tem != XEXP (x, 0))
421 {
422 x = copy_rtx (x);
423 XEXP (x, 0) = tem;
424 }
425 }
426 else if (code == PLUS || code == MULT)
427 {
428 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
429 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
430 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
431 {
432 x = copy_rtx (x);
433 XEXP (x, 0) = new0;
434 XEXP (x, 1) = new1;
435 }
436 }
437 return x;
438 }
439 /* If the increment has not happened, use the variable itself. */
440 if (QUEUED_INSN (x) == 0)
441 return QUEUED_VAR (x);
442 /* If the increment has happened and a pre-increment copy exists,
443 use that copy. */
444 if (QUEUED_COPY (x) != 0)
445 return QUEUED_COPY (x);
446 /* The increment has happened but we haven't set up a pre-increment copy.
447 Set one up now, and use it. */
448 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
449 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
450 QUEUED_INSN (x));
451 return QUEUED_COPY (x);
452 }
453
454 /* Return nonzero if X contains a QUEUED expression:
455 if it contains anything that will be altered by a queued increment.
456 We handle only combinations of MEM, PLUS, MINUS and MULT operators
457 since memory addresses generally contain only those. */
458
459 int
460 queued_subexp_p (x)
461 rtx x;
462 {
463 register enum rtx_code code = GET_CODE (x);
464 switch (code)
465 {
466 case QUEUED:
467 return 1;
468 case MEM:
469 return queued_subexp_p (XEXP (x, 0));
470 case MULT:
471 case PLUS:
472 case MINUS:
473 return (queued_subexp_p (XEXP (x, 0))
474 || queued_subexp_p (XEXP (x, 1)));
475 default:
476 return 0;
477 }
478 }
479
480 /* Perform all the pending incrementations. */
481
482 void
483 emit_queue ()
484 {
485 register rtx p;
486 while ((p = pending_chain))
487 {
488 rtx body = QUEUED_BODY (p);
489
490 if (GET_CODE (body) == SEQUENCE)
491 {
492 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
493 emit_insn (QUEUED_BODY (p));
494 }
495 else
496 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
497 pending_chain = QUEUED_NEXT (p);
498 }
499 }
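/* A minimal usage sketch of the queue interface above (illustrative only;
   TARGET and X are hypothetical operands, not names from this file):

     rtx op = protect_from_queue (x, 0);
     emit_insn (gen_move_insn (target, op));
     emit_queue ();

   protect_from_queue must be called immediately before OP is put into an
   insn; emit_queue then flushes any post-increments recorded by
   enqueue_insn.  Holding OP across an intervening emit_queue would produce
   incorrect code, as the comment above protect_from_queue warns.  */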
500 \f
501 /* Copy data from FROM to TO, where the machine modes are not the same.
502 Both modes may be integer, or both may be floating.
503 UNSIGNEDP should be nonzero if FROM is an unsigned type.
504 This causes zero-extension instead of sign-extension. */
505
506 void
507 convert_move (to, from, unsignedp)
508 register rtx to, from;
509 int unsignedp;
510 {
511 enum machine_mode to_mode = GET_MODE (to);
512 enum machine_mode from_mode = GET_MODE (from);
513 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
514 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
515 enum insn_code code;
516 rtx libcall;
517
518 /* rtx code for making an equivalent value. */
519 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
520
521 to = protect_from_queue (to, 1);
522 from = protect_from_queue (from, 0);
523
524 if (to_real != from_real)
525 abort ();
526
527 /* If FROM is a SUBREG that indicates that we have already done at least
528 the required extension, strip it. We don't handle such SUBREGs as
529 TO here. */
530
531 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
532 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
533 >= GET_MODE_SIZE (to_mode))
534 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
535 from = gen_lowpart (to_mode, from), from_mode = to_mode;
536
537 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
538 abort ();
539
540 if (to_mode == from_mode
541 || (from_mode == VOIDmode && CONSTANT_P (from)))
542 {
543 emit_move_insn (to, from);
544 return;
545 }
546
547 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
548 {
549 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
550 abort ();
551
552 if (VECTOR_MODE_P (to_mode))
553 from = gen_rtx_SUBREG (to_mode, from, 0);
554 else
555 to = gen_rtx_SUBREG (from_mode, to, 0);
556
557 emit_move_insn (to, from);
558 return;
559 }
560
561 if (to_real != from_real)
562 abort ();
563
564 if (to_real)
565 {
566 rtx value;
567
568 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
569 {
570 /* Try converting directly if the insn is supported. */
571 if ((code = can_extend_p (to_mode, from_mode, 0))
572 != CODE_FOR_nothing)
573 {
574 emit_unop_insn (code, to, from, UNKNOWN);
575 return;
576 }
577 }
578
579 #ifdef HAVE_trunchfqf2
580 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
581 {
582 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
583 return;
584 }
585 #endif
586 #ifdef HAVE_trunctqfqf2
587 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
588 {
589 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
590 return;
591 }
592 #endif
593 #ifdef HAVE_truncsfqf2
594 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
595 {
596 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
597 return;
598 }
599 #endif
600 #ifdef HAVE_truncdfqf2
601 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
602 {
603 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
604 return;
605 }
606 #endif
607 #ifdef HAVE_truncxfqf2
608 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
609 {
610 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
611 return;
612 }
613 #endif
614 #ifdef HAVE_trunctfqf2
615 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
616 {
617 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
618 return;
619 }
620 #endif
621
622 #ifdef HAVE_trunctqfhf2
623 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
624 {
625 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
626 return;
627 }
628 #endif
629 #ifdef HAVE_truncsfhf2
630 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
631 {
632 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
633 return;
634 }
635 #endif
636 #ifdef HAVE_truncdfhf2
637 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
638 {
639 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
640 return;
641 }
642 #endif
643 #ifdef HAVE_truncxfhf2
644 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
645 {
646 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
647 return;
648 }
649 #endif
650 #ifdef HAVE_trunctfhf2
651 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
652 {
653 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
654 return;
655 }
656 #endif
657
658 #ifdef HAVE_truncsftqf2
659 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
660 {
661 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
662 return;
663 }
664 #endif
665 #ifdef HAVE_truncdftqf2
666 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
667 {
668 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
669 return;
670 }
671 #endif
672 #ifdef HAVE_truncxftqf2
673 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
674 {
675 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
676 return;
677 }
678 #endif
679 #ifdef HAVE_trunctftqf2
680 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
681 {
682 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
683 return;
684 }
685 #endif
686
687 #ifdef HAVE_truncdfsf2
688 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
689 {
690 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694 #ifdef HAVE_truncxfsf2
695 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
696 {
697 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
698 return;
699 }
700 #endif
701 #ifdef HAVE_trunctfsf2
702 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
703 {
704 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
705 return;
706 }
707 #endif
708 #ifdef HAVE_truncxfdf2
709 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
710 {
711 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
712 return;
713 }
714 #endif
715 #ifdef HAVE_trunctfdf2
716 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
717 {
718 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
719 return;
720 }
721 #endif
722
723 libcall = (rtx) 0;
724 switch (from_mode)
725 {
726 case SFmode:
727 switch (to_mode)
728 {
729 case DFmode:
730 libcall = extendsfdf2_libfunc;
731 break;
732
733 case XFmode:
734 libcall = extendsfxf2_libfunc;
735 break;
736
737 case TFmode:
738 libcall = extendsftf2_libfunc;
739 break;
740
741 default:
742 break;
743 }
744 break;
745
746 case DFmode:
747 switch (to_mode)
748 {
749 case SFmode:
750 libcall = truncdfsf2_libfunc;
751 break;
752
753 case XFmode:
754 libcall = extenddfxf2_libfunc;
755 break;
756
757 case TFmode:
758 libcall = extenddftf2_libfunc;
759 break;
760
761 default:
762 break;
763 }
764 break;
765
766 case XFmode:
767 switch (to_mode)
768 {
769 case SFmode:
770 libcall = truncxfsf2_libfunc;
771 break;
772
773 case DFmode:
774 libcall = truncxfdf2_libfunc;
775 break;
776
777 default:
778 break;
779 }
780 break;
781
782 case TFmode:
783 switch (to_mode)
784 {
785 case SFmode:
786 libcall = trunctfsf2_libfunc;
787 break;
788
789 case DFmode:
790 libcall = trunctfdf2_libfunc;
791 break;
792
793 default:
794 break;
795 }
796 break;
797
798 default:
799 break;
800 }
801
802 if (libcall == (rtx) 0)
803 /* This conversion is not implemented yet. */
804 abort ();
805
806 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
807 1, from, from_mode);
808 emit_move_insn (to, value);
809 return;
810 }
811
812 /* Now both modes are integers. */
813
814 /* Handle expanding beyond a word. */
815 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
816 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
817 {
818 rtx insns;
819 rtx lowpart;
820 rtx fill_value;
821 rtx lowfrom;
822 int i;
823 enum machine_mode lowpart_mode;
824 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
825
826 /* Try converting directly if the insn is supported. */
827 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
828 != CODE_FOR_nothing)
829 {
830 /* If FROM is a SUBREG, put it into a register. Do this
831 so that we always generate the same set of insns for
832 better cse'ing; if an intermediate assignment occurred,
833 we won't be doing the operation directly on the SUBREG. */
834 if (optimize > 0 && GET_CODE (from) == SUBREG)
835 from = force_reg (from_mode, from);
836 emit_unop_insn (code, to, from, equiv_code);
837 return;
838 }
839 /* Next, try converting via full word. */
840 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
841 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
842 != CODE_FOR_nothing))
843 {
844 if (GET_CODE (to) == REG)
845 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
846 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
847 emit_unop_insn (code, to,
848 gen_lowpart (word_mode, to), equiv_code);
849 return;
850 }
851
852 /* No special multiword conversion insn; do it by hand. */
853 start_sequence ();
854
855 /* Since we will turn this into a no conflict block, we must ensure
856 that the source does not overlap the target. */
857
858 if (reg_overlap_mentioned_p (to, from))
859 from = force_reg (from_mode, from);
860
861 /* Get a copy of FROM widened to a word, if necessary. */
862 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
863 lowpart_mode = word_mode;
864 else
865 lowpart_mode = from_mode;
866
867 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
868
869 lowpart = gen_lowpart (lowpart_mode, to);
870 emit_move_insn (lowpart, lowfrom);
871
872 /* Compute the value to put in each remaining word. */
873 if (unsignedp)
874 fill_value = const0_rtx;
875 else
876 {
877 #ifdef HAVE_slt
878 if (HAVE_slt
879 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
880 && STORE_FLAG_VALUE == -1)
881 {
882 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
883 lowpart_mode, 0, 0);
884 fill_value = gen_reg_rtx (word_mode);
885 emit_insn (gen_slt (fill_value));
886 }
887 else
888 #endif
889 {
890 fill_value
891 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
892 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
893 NULL_RTX, 0);
894 fill_value = convert_to_mode (word_mode, fill_value, 1);
895 }
896 }
897
898 /* Fill the remaining words. */
899 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
900 {
901 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
902 rtx subword = operand_subword (to, index, 1, to_mode);
903
904 if (subword == 0)
905 abort ();
906
907 if (fill_value != subword)
908 emit_move_insn (subword, fill_value);
909 }
910
911 insns = get_insns ();
912 end_sequence ();
913
914 emit_no_conflict_block (insns, to, from, NULL_RTX,
915 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
916 return;
917 }
918
919 /* Truncating multi-word to a word or less. */
920 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
921 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
922 {
923 if (!((GET_CODE (from) == MEM
924 && ! MEM_VOLATILE_P (from)
925 && direct_load[(int) to_mode]
926 && ! mode_dependent_address_p (XEXP (from, 0)))
927 || GET_CODE (from) == REG
928 || GET_CODE (from) == SUBREG))
929 from = force_reg (from_mode, from);
930 convert_move (to, gen_lowpart (word_mode, from), 0);
931 return;
932 }
933
934 /* Handle pointer conversion. */ /* SPEE 900220. */
935 if (to_mode == PQImode)
936 {
937 if (from_mode != QImode)
938 from = convert_to_mode (QImode, from, unsignedp);
939
940 #ifdef HAVE_truncqipqi2
941 if (HAVE_truncqipqi2)
942 {
943 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
944 return;
945 }
946 #endif /* HAVE_truncqipqi2 */
947 abort ();
948 }
949
950 if (from_mode == PQImode)
951 {
952 if (to_mode != QImode)
953 {
954 from = convert_to_mode (QImode, from, unsignedp);
955 from_mode = QImode;
956 }
957 else
958 {
959 #ifdef HAVE_extendpqiqi2
960 if (HAVE_extendpqiqi2)
961 {
962 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
963 return;
964 }
965 #endif /* HAVE_extendpqiqi2 */
966 abort ();
967 }
968 }
969
970 if (to_mode == PSImode)
971 {
972 if (from_mode != SImode)
973 from = convert_to_mode (SImode, from, unsignedp);
974
975 #ifdef HAVE_truncsipsi2
976 if (HAVE_truncsipsi2)
977 {
978 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
979 return;
980 }
981 #endif /* HAVE_truncsipsi2 */
982 abort ();
983 }
984
985 if (from_mode == PSImode)
986 {
987 if (to_mode != SImode)
988 {
989 from = convert_to_mode (SImode, from, unsignedp);
990 from_mode = SImode;
991 }
992 else
993 {
994 #ifdef HAVE_extendpsisi2
995 if (HAVE_extendpsisi2)
996 {
997 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
998 return;
999 }
1000 #endif /* HAVE_extendpsisi2 */
1001 abort ();
1002 }
1003 }
1004
1005 if (to_mode == PDImode)
1006 {
1007 if (from_mode != DImode)
1008 from = convert_to_mode (DImode, from, unsignedp);
1009
1010 #ifdef HAVE_truncdipdi2
1011 if (HAVE_truncdipdi2)
1012 {
1013 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1014 return;
1015 }
1016 #endif /* HAVE_truncdipdi2 */
1017 abort ();
1018 }
1019
1020 if (from_mode == PDImode)
1021 {
1022 if (to_mode != DImode)
1023 {
1024 from = convert_to_mode (DImode, from, unsignedp);
1025 from_mode = DImode;
1026 }
1027 else
1028 {
1029 #ifdef HAVE_extendpdidi2
1030 if (HAVE_extendpdidi2)
1031 {
1032 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1033 return;
1034 }
1035 #endif /* HAVE_extendpdidi2 */
1036 abort ();
1037 }
1038 }
1039
1040 /* Now follow all the conversions between integers
1041 no more than a word long. */
1042
1043 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1044 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1045 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1046 GET_MODE_BITSIZE (from_mode)))
1047 {
1048 if (!((GET_CODE (from) == MEM
1049 && ! MEM_VOLATILE_P (from)
1050 && direct_load[(int) to_mode]
1051 && ! mode_dependent_address_p (XEXP (from, 0)))
1052 || GET_CODE (from) == REG
1053 || GET_CODE (from) == SUBREG))
1054 from = force_reg (from_mode, from);
1055 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1056 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1057 from = copy_to_reg (from);
1058 emit_move_insn (to, gen_lowpart (to_mode, from));
1059 return;
1060 }
1061
1062 /* Handle extension. */
1063 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1064 {
1065 /* Convert directly if that works. */
1066 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1067 != CODE_FOR_nothing)
1068 {
1069 emit_unop_insn (code, to, from, equiv_code);
1070 return;
1071 }
1072 else
1073 {
1074 enum machine_mode intermediate;
1075 rtx tmp;
1076 tree shift_amount;
1077
1078 /* Search for a mode to convert via. */
1079 for (intermediate = from_mode; intermediate != VOIDmode;
1080 intermediate = GET_MODE_WIDER_MODE (intermediate))
1081 if (((can_extend_p (to_mode, intermediate, unsignedp)
1082 != CODE_FOR_nothing)
1083 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1084 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1085 GET_MODE_BITSIZE (intermediate))))
1086 && (can_extend_p (intermediate, from_mode, unsignedp)
1087 != CODE_FOR_nothing))
1088 {
1089 convert_move (to, convert_to_mode (intermediate, from,
1090 unsignedp), unsignedp);
1091 return;
1092 }
1093
1094 /* No suitable intermediate mode.
1095 Generate what we need with shifts. */
1096 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1097 - GET_MODE_BITSIZE (from_mode), 0);
1098 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1099 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1100 to, unsignedp);
1101 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1102 to, unsignedp);
1103 if (tmp != to)
1104 emit_move_insn (to, tmp);
1105 return;
1106 }
1107 }
1108
1109 /* Support special truncate insns for certain modes. */
1110
1111 if (from_mode == DImode && to_mode == SImode)
1112 {
1113 #ifdef HAVE_truncdisi2
1114 if (HAVE_truncdisi2)
1115 {
1116 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1117 return;
1118 }
1119 #endif
1120 convert_move (to, force_reg (from_mode, from), unsignedp);
1121 return;
1122 }
1123
1124 if (from_mode == DImode && to_mode == HImode)
1125 {
1126 #ifdef HAVE_truncdihi2
1127 if (HAVE_truncdihi2)
1128 {
1129 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1130 return;
1131 }
1132 #endif
1133 convert_move (to, force_reg (from_mode, from), unsignedp);
1134 return;
1135 }
1136
1137 if (from_mode == DImode && to_mode == QImode)
1138 {
1139 #ifdef HAVE_truncdiqi2
1140 if (HAVE_truncdiqi2)
1141 {
1142 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1143 return;
1144 }
1145 #endif
1146 convert_move (to, force_reg (from_mode, from), unsignedp);
1147 return;
1148 }
1149
1150 if (from_mode == SImode && to_mode == HImode)
1151 {
1152 #ifdef HAVE_truncsihi2
1153 if (HAVE_truncsihi2)
1154 {
1155 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1156 return;
1157 }
1158 #endif
1159 convert_move (to, force_reg (from_mode, from), unsignedp);
1160 return;
1161 }
1162
1163 if (from_mode == SImode && to_mode == QImode)
1164 {
1165 #ifdef HAVE_truncsiqi2
1166 if (HAVE_truncsiqi2)
1167 {
1168 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1169 return;
1170 }
1171 #endif
1172 convert_move (to, force_reg (from_mode, from), unsignedp);
1173 return;
1174 }
1175
1176 if (from_mode == HImode && to_mode == QImode)
1177 {
1178 #ifdef HAVE_trunchiqi2
1179 if (HAVE_trunchiqi2)
1180 {
1181 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1182 return;
1183 }
1184 #endif
1185 convert_move (to, force_reg (from_mode, from), unsignedp);
1186 return;
1187 }
1188
1189 if (from_mode == TImode && to_mode == DImode)
1190 {
1191 #ifdef HAVE_trunctidi2
1192 if (HAVE_trunctidi2)
1193 {
1194 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1195 return;
1196 }
1197 #endif
1198 convert_move (to, force_reg (from_mode, from), unsignedp);
1199 return;
1200 }
1201
1202 if (from_mode == TImode && to_mode == SImode)
1203 {
1204 #ifdef HAVE_trunctisi2
1205 if (HAVE_trunctisi2)
1206 {
1207 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1208 return;
1209 }
1210 #endif
1211 convert_move (to, force_reg (from_mode, from), unsignedp);
1212 return;
1213 }
1214
1215 if (from_mode == TImode && to_mode == HImode)
1216 {
1217 #ifdef HAVE_trunctihi2
1218 if (HAVE_trunctihi2)
1219 {
1220 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1221 return;
1222 }
1223 #endif
1224 convert_move (to, force_reg (from_mode, from), unsignedp);
1225 return;
1226 }
1227
1228 if (from_mode == TImode && to_mode == QImode)
1229 {
1230 #ifdef HAVE_trunctiqi2
1231 if (HAVE_trunctiqi2)
1232 {
1233 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1234 return;
1235 }
1236 #endif
1237 convert_move (to, force_reg (from_mode, from), unsignedp);
1238 return;
1239 }
1240
1241 /* Handle truncation of volatile memrefs, and so on;
1242 the things that couldn't be truncated directly,
1243 and for which there was no special instruction. */
1244 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1245 {
1246 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1247 emit_move_insn (to, temp);
1248 return;
1249 }
1250
1251 /* Mode combination is not recognized. */
1252 abort ();
1253 }
1254
1255 /* Return an rtx for a value that would result
1256 from converting X to mode MODE.
1257 Both X and MODE may be floating, or both integer.
1258 UNSIGNEDP is nonzero if X is an unsigned value.
1259 This can be done by referring to a part of X in place
1260 or by copying to a new temporary with conversion.
1261
1262 This function *must not* call protect_from_queue
1263 except when putting X into an insn (in which case convert_move does it). */
1264
1265 rtx
1266 convert_to_mode (mode, x, unsignedp)
1267 enum machine_mode mode;
1268 rtx x;
1269 int unsignedp;
1270 {
1271 return convert_modes (mode, VOIDmode, x, unsignedp);
1272 }
1273
1274 /* Return an rtx for a value that would result
1275 from converting X from mode OLDMODE to mode MODE.
1276 Both modes may be floating, or both integer.
1277 UNSIGNEDP is nonzero if X is an unsigned value.
1278
1279 This can be done by referring to a part of X in place
1280 or by copying to a new temporary with conversion.
1281
1282 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1283
1284 This function *must not* call protect_from_queue
1285 except when putting X into an insn (in which case convert_move does it). */
1286
1287 rtx
1288 convert_modes (mode, oldmode, x, unsignedp)
1289 enum machine_mode mode, oldmode;
1290 rtx x;
1291 int unsignedp;
1292 {
1293 register rtx temp;
1294
1295 /* If FROM is a SUBREG that indicates that we have already done at least
1296 the required extension, strip it. */
1297
1298 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1299 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1300 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1301 x = gen_lowpart (mode, x);
1302
1303 if (GET_MODE (x) != VOIDmode)
1304 oldmode = GET_MODE (x);
1305
1306 if (mode == oldmode)
1307 return x;
1308
1309 /* There is one case that we must handle specially: If we are converting
1310 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1311 we are to interpret the constant as unsigned, gen_lowpart will do
1312 the wrong thing if the constant appears negative. What we want to do is
1313 make the high-order word of the constant zero, not all ones. */
1314
1315 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1316 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1317 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1318 {
1319 HOST_WIDE_INT val = INTVAL (x);
1320
1321 if (oldmode != VOIDmode
1322 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1323 {
1324 int width = GET_MODE_BITSIZE (oldmode);
1325
1326 /* We need to zero extend VAL. */
1327 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1328 }
1329
1330 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1331 }
1332
1333 /* We can do this with a gen_lowpart if both desired and current modes
1334 are integer, and this is either a constant integer, a register, or a
1335 non-volatile MEM. Except for the constant case where MODE is no
1336 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1337
1338 if ((GET_CODE (x) == CONST_INT
1339 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1340 || (GET_MODE_CLASS (mode) == MODE_INT
1341 && GET_MODE_CLASS (oldmode) == MODE_INT
1342 && (GET_CODE (x) == CONST_DOUBLE
1343 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1344 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1345 && direct_load[(int) mode])
1346 || (GET_CODE (x) == REG
1347 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1348 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1349 {
1350 /* ?? If we don't know OLDMODE, we have to assume here that
1351 X does not need sign- or zero-extension. This may not be
1352 the case, but it's the best we can do. */
1353 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1354 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1355 {
1356 HOST_WIDE_INT val = INTVAL (x);
1357 int width = GET_MODE_BITSIZE (oldmode);
1358
1359 /* We must sign or zero-extend in this case. Start by
1360 zero-extending, then sign extend if we need to. */
1361 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1362 if (! unsignedp
1363 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1364 val |= (HOST_WIDE_INT) (-1) << width;
1365
1366 return GEN_INT (val);
1367 }
1368
1369 return gen_lowpart (mode, x);
1370 }
1371
1372 temp = gen_reg_rtx (mode);
1373 convert_move (temp, x, unsignedp);
1374 return temp;
1375 }
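/* Illustrative example (not from the original source): zero-extending a
   QImode value NARROW into SImode can be written either as

     rtx wide = convert_to_mode (SImode, narrow, 1);

   or, when the destination register WIDE_REG already exists,

     convert_move (wide_reg, narrow, 1);

   Note that convert_to_mode/convert_modes may return NARROW itself (for
   example when it is a suitable CONST_INT), so callers must not assume a
   fresh register is always returned.  */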
1376 \f
1377 /* This macro is used to determine what the largest unit size that
1378 move_by_pieces can use is. */
1379
1380 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1381 move efficiently, as opposed to MOVE_MAX which is the maximum
1382 number of bytes we can move with a single instruction. */
1383
1384 #ifndef MOVE_MAX_PIECES
1385 #define MOVE_MAX_PIECES MOVE_MAX
1386 #endif
1387
1388 /* Generate several move instructions to copy LEN bytes
1389 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1390 The caller must pass FROM and TO
1391 through protect_from_queue before calling.
1392 ALIGN is maximum alignment we can assume. */
1393
1394 void
1395 move_by_pieces (to, from, len, align)
1396 rtx to, from;
1397 unsigned HOST_WIDE_INT len;
1398 unsigned int align;
1399 {
1400 struct move_by_pieces data;
1401 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1402 unsigned int max_size = MOVE_MAX_PIECES + 1;
1403 enum machine_mode mode = VOIDmode, tmode;
1404 enum insn_code icode;
1405
1406 data.offset = 0;
1407 data.to_addr = to_addr;
1408 data.from_addr = from_addr;
1409 data.to = to;
1410 data.from = from;
1411 data.autinc_to
1412 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1413 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1414 data.autinc_from
1415 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1416 || GET_CODE (from_addr) == POST_INC
1417 || GET_CODE (from_addr) == POST_DEC);
1418
1419 data.explicit_inc_from = 0;
1420 data.explicit_inc_to = 0;
1421 data.reverse
1422 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1423 if (data.reverse) data.offset = len;
1424 data.len = len;
1425
1426 /* If copying requires more than two move insns,
1427 copy addresses to registers (to make displacements shorter)
1428 and use post-increment if available. */
1429 if (!(data.autinc_from && data.autinc_to)
1430 && move_by_pieces_ninsns (len, align) > 2)
1431 {
1432 /* Find the mode of the largest move... */
1433 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1434 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1435 if (GET_MODE_SIZE (tmode) < max_size)
1436 mode = tmode;
1437
1438 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1439 {
1440 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1441 data.autinc_from = 1;
1442 data.explicit_inc_from = -1;
1443 }
1444 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1445 {
1446 data.from_addr = copy_addr_to_reg (from_addr);
1447 data.autinc_from = 1;
1448 data.explicit_inc_from = 1;
1449 }
1450 if (!data.autinc_from && CONSTANT_P (from_addr))
1451 data.from_addr = copy_addr_to_reg (from_addr);
1452 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1453 {
1454 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1455 data.autinc_to = 1;
1456 data.explicit_inc_to = -1;
1457 }
1458 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1459 {
1460 data.to_addr = copy_addr_to_reg (to_addr);
1461 data.autinc_to = 1;
1462 data.explicit_inc_to = 1;
1463 }
1464 if (!data.autinc_to && CONSTANT_P (to_addr))
1465 data.to_addr = copy_addr_to_reg (to_addr);
1466 }
1467
1468 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1469 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1470 align = MOVE_MAX * BITS_PER_UNIT;
1471
1472 /* First move what we can in the largest integer mode, then go to
1473 successively smaller modes. */
1474
1475 while (max_size > 1)
1476 {
1477 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1478 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1479 if (GET_MODE_SIZE (tmode) < max_size)
1480 mode = tmode;
1481
1482 if (mode == VOIDmode)
1483 break;
1484
1485 icode = mov_optab->handlers[(int) mode].insn_code;
1486 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1487 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1488
1489 max_size = GET_MODE_SIZE (mode);
1490 }
1491
1492 /* The code above should have handled everything. */
1493 if (data.len > 0)
1494 abort ();
1495 }
1496
1497 /* Return number of insns required to move L bytes by pieces.
1498 ALIGN (in bits) is maximum alignment we can assume. */
1499
1500 static unsigned HOST_WIDE_INT
1501 move_by_pieces_ninsns (l, align)
1502 unsigned HOST_WIDE_INT l;
1503 unsigned int align;
1504 {
1505 unsigned HOST_WIDE_INT n_insns = 0;
1506 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1507
1508 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1509 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1510 align = MOVE_MAX * BITS_PER_UNIT;
1511
1512 while (max_size > 1)
1513 {
1514 enum machine_mode mode = VOIDmode, tmode;
1515 enum insn_code icode;
1516
1517 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1518 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1519 if (GET_MODE_SIZE (tmode) < max_size)
1520 mode = tmode;
1521
1522 if (mode == VOIDmode)
1523 break;
1524
1525 icode = mov_optab->handlers[(int) mode].insn_code;
1526 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1527 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1528
1529 max_size = GET_MODE_SIZE (mode);
1530 }
1531
1532 return n_insns;
1533 }
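/* Worked example (illustrative): on a machine where MOVE_MAX is 4 and the
   operands are word aligned, move_by_pieces_ninsns (11, 32) decomposes
   11 = 2*4 + 1*2 + 1*1 and returns 4: two SImode moves, one HImode move
   and one QImode move.  */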
1534
1535 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1536 with move instructions for mode MODE. GENFUN is the gen_... function
1537 to make a move insn for that mode. DATA has all the other info. */
1538
1539 static void
1540 move_by_pieces_1 (genfun, mode, data)
1541 rtx (*genfun) PARAMS ((rtx, ...));
1542 enum machine_mode mode;
1543 struct move_by_pieces *data;
1544 {
1545 unsigned int size = GET_MODE_SIZE (mode);
1546 rtx to1, from1;
1547
1548 while (data->len >= size)
1549 {
1550 if (data->reverse)
1551 data->offset -= size;
1552
1553 if (data->autinc_to)
1554 {
1555 to1 = gen_rtx_MEM (mode, data->to_addr);
1556 MEM_COPY_ATTRIBUTES (to1, data->to);
1557 }
1558 else
1559 to1 = change_address (data->to, mode,
1560 plus_constant (data->to_addr, data->offset));
1561
1562 if (data->autinc_from)
1563 {
1564 from1 = gen_rtx_MEM (mode, data->from_addr);
1565 MEM_COPY_ATTRIBUTES (from1, data->from);
1566 }
1567 else
1568 from1 = change_address (data->from, mode,
1569 plus_constant (data->from_addr, data->offset));
1570
1571 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1572 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1573 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1574 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1575
1576 emit_insn ((*genfun) (to1, from1));
1577
1578 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1579 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1580 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1581 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1582
1583 if (! data->reverse)
1584 data->offset += size;
1585
1586 data->len -= size;
1587 }
1588 }
1589 \f
1590 /* Emit code to move a block Y to a block X.
1591 This may be done with string-move instructions,
1592 with multiple scalar move instructions, or with a library call.
1593
1594 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1595 with mode BLKmode.
1596 SIZE is an rtx that says how long they are.
1597 ALIGN is the maximum alignment we can assume they have.
1598
1599 Return the address of the new block, if memcpy is called and returns it,
1600 0 otherwise. */
1601
1602 rtx
1603 emit_block_move (x, y, size, align)
1604 rtx x, y;
1605 rtx size;
1606 unsigned int align;
1607 {
1608 rtx retval = 0;
1609 #ifdef TARGET_MEM_FUNCTIONS
1610 static tree fn;
1611 tree call_expr, arg_list;
1612 #endif
1613
1614 if (GET_MODE (x) != BLKmode)
1615 abort ();
1616
1617 if (GET_MODE (y) != BLKmode)
1618 abort ();
1619
1620 x = protect_from_queue (x, 1);
1621 y = protect_from_queue (y, 0);
1622 size = protect_from_queue (size, 0);
1623
1624 if (GET_CODE (x) != MEM)
1625 abort ();
1626 if (GET_CODE (y) != MEM)
1627 abort ();
1628 if (size == 0)
1629 abort ();
1630
1631 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1632 move_by_pieces (x, y, INTVAL (size), align);
1633 else
1634 {
1635 /* Try the most limited insn first, because there's no point
1636 including more than one in the machine description unless
1637 the more limited one has some advantage. */
1638
1639 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1640 enum machine_mode mode;
1641
1642 /* Since this is a move insn, we don't care about volatility. */
1643 volatile_ok = 1;
1644
1645 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1646 mode = GET_MODE_WIDER_MODE (mode))
1647 {
1648 enum insn_code code = movstr_optab[(int) mode];
1649 insn_operand_predicate_fn pred;
1650
1651 if (code != CODE_FOR_nothing
1652 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1653 here because if SIZE is less than the mode mask, as it is
1654 returned by the macro, it will definitely be less than the
1655 actual mode mask. */
1656 && ((GET_CODE (size) == CONST_INT
1657 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1658 <= (GET_MODE_MASK (mode) >> 1)))
1659 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1660 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1661 || (*pred) (x, BLKmode))
1662 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1663 || (*pred) (y, BLKmode))
1664 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1665 || (*pred) (opalign, VOIDmode)))
1666 {
1667 rtx op2;
1668 rtx last = get_last_insn ();
1669 rtx pat;
1670
1671 op2 = convert_to_mode (mode, size, 1);
1672 pred = insn_data[(int) code].operand[2].predicate;
1673 if (pred != 0 && ! (*pred) (op2, mode))
1674 op2 = copy_to_mode_reg (mode, op2);
1675
1676 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1677 if (pat)
1678 {
1679 emit_insn (pat);
1680 volatile_ok = 0;
1681 return 0;
1682 }
1683 else
1684 delete_insns_since (last);
1685 }
1686 }
1687
1688 volatile_ok = 0;
1689
1690 /* X, Y, or SIZE may have been passed through protect_from_queue.
1691
1692 It is unsafe to save the value generated by protect_from_queue
1693 and reuse it later. Consider what happens if emit_queue is
1694 called before the return value from protect_from_queue is used.
1695
1696 Expansion of the CALL_EXPR below will call emit_queue before
1697 we are finished emitting RTL for argument setup. So if we are
1698 not careful we could get the wrong value for an argument.
1699
1700 To avoid this problem we go ahead and emit code to copy X, Y &
1701 SIZE into new pseudos. We can then place those new pseudos
1702 into an RTL_EXPR and use them later, even after a call to
1703 emit_queue.
1704
1705 Note this is not strictly needed for library calls since they
1706 do not call emit_queue before loading their arguments. However,
1707 we may need to have library calls call emit_queue in the future
1708 since failing to do so could cause problems for targets which
1709 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1710 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1711 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1712
1713 #ifdef TARGET_MEM_FUNCTIONS
1714 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1715 #else
1716 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1717 TREE_UNSIGNED (integer_type_node));
1718 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1719 #endif
1720
1721 #ifdef TARGET_MEM_FUNCTIONS
1722 /* It is incorrect to use the libcall calling conventions to call
1723 memcpy in this context.
1724
1725 This could be a user call to memcpy and the user may wish to
1726 examine the return value from memcpy.
1727
1728 For targets where libcalls and normal calls have different conventions
1729 for returning pointers, we could end up generating incorrect code.
1730
1731 So instead of using a libcall sequence we build up a suitable
1732 CALL_EXPR and expand the call in the normal fashion. */
1733 if (fn == NULL_TREE)
1734 {
1735 tree fntype;
1736
1737 /* This was copied from except.c, I don't know if all this is
1738 necessary in this context or not. */
1739 fn = get_identifier ("memcpy");
1740 push_obstacks_nochange ();
1741 end_temporary_allocation ();
1742 fntype = build_pointer_type (void_type_node);
1743 fntype = build_function_type (fntype, NULL_TREE);
1744 fn = build_decl (FUNCTION_DECL, fn, fntype);
1745 ggc_add_tree_root (&fn, 1);
1746 DECL_EXTERNAL (fn) = 1;
1747 TREE_PUBLIC (fn) = 1;
1748 DECL_ARTIFICIAL (fn) = 1;
1749 make_decl_rtl (fn, NULL_PTR, 1);
1750 assemble_external (fn);
1751 pop_obstacks ();
1752 }
1753
1754 /* We need to make an argument list for the function call.
1755
1756 memcpy has three arguments, the first two are void * addresses and
1757 the last is a size_t byte count for the copy. */
1758 arg_list
1759 = build_tree_list (NULL_TREE,
1760 make_tree (build_pointer_type (void_type_node), x));
1761 TREE_CHAIN (arg_list)
1762 = build_tree_list (NULL_TREE,
1763 make_tree (build_pointer_type (void_type_node), y));
1764 TREE_CHAIN (TREE_CHAIN (arg_list))
1765 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1766 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1767
1768 /* Now we have to build up the CALL_EXPR itself. */
1769 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1770 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1771 call_expr, arg_list, NULL_TREE);
1772 TREE_SIDE_EFFECTS (call_expr) = 1;
1773
1774 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1775 #else
1776 emit_library_call (bcopy_libfunc, 0,
1777 VOIDmode, 3, y, Pmode, x, Pmode,
1778 convert_to_mode (TYPE_MODE (integer_type_node), size,
1779 TREE_UNSIGNED (integer_type_node)),
1780 TYPE_MODE (integer_type_node));
1781 #endif
1782 }
1783
1784 return retval;
1785 }
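/* Typical call (illustrative sketch; EXP, DST_MEM and SRC_MEM are
   hypothetical):

     emit_block_move (dst_mem, src_mem, expr_size (exp),
                      TYPE_ALIGN (TREE_TYPE (exp)));

   Small constant sizes are expanded inline via move_by_pieces, larger ones
   first try the target's movstrM patterns, and anything left over falls
   back to a call to memcpy (or bcopy when TARGET_MEM_FUNCTIONS is not
   defined).  */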
1786 \f
1787 /* Copy all or part of a value X into registers starting at REGNO.
1788 The number of registers to be filled is NREGS. */
1789
1790 void
1791 move_block_to_reg (regno, x, nregs, mode)
1792 int regno;
1793 rtx x;
1794 int nregs;
1795 enum machine_mode mode;
1796 {
1797 int i;
1798 #ifdef HAVE_load_multiple
1799 rtx pat;
1800 rtx last;
1801 #endif
1802
1803 if (nregs == 0)
1804 return;
1805
1806 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1807 x = validize_mem (force_const_mem (mode, x));
1808
1809 /* See if the machine can do this with a load multiple insn. */
1810 #ifdef HAVE_load_multiple
1811 if (HAVE_load_multiple)
1812 {
1813 last = get_last_insn ();
1814 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1815 GEN_INT (nregs));
1816 if (pat)
1817 {
1818 emit_insn (pat);
1819 return;
1820 }
1821 else
1822 delete_insns_since (last);
1823 }
1824 #endif
1825
1826 for (i = 0; i < nregs; i++)
1827 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1828 operand_subword_force (x, i, mode));
1829 }
1830
1831 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1832 The number of registers to be filled is NREGS. SIZE indicates the number
1833 of bytes in the object X. */
1834
1835 void
1836 move_block_from_reg (regno, x, nregs, size)
1837 int regno;
1838 rtx x;
1839 int nregs;
1840 int size;
1841 {
1842 int i;
1843 #ifdef HAVE_store_multiple
1844 rtx pat;
1845 rtx last;
1846 #endif
1847 enum machine_mode mode;
1848
1849 /* If SIZE is that of a mode no bigger than a word, just use that
1850 mode's store operation. */
1851 if (size <= UNITS_PER_WORD
1852 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1853 {
1854 emit_move_insn (change_address (x, mode, NULL),
1855 gen_rtx_REG (mode, regno));
1856 return;
1857 }
1858
1859 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1860 to the left before storing to memory. Note that the previous test
1861 doesn't handle all cases (e.g. SIZE == 3). */
1862 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1863 {
1864 rtx tem = operand_subword (x, 0, 1, BLKmode);
1865 rtx shift;
1866
1867 if (tem == 0)
1868 abort ();
1869
1870 shift = expand_shift (LSHIFT_EXPR, word_mode,
1871 gen_rtx_REG (word_mode, regno),
1872 build_int_2 ((UNITS_PER_WORD - size)
1873 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1874 emit_move_insn (tem, shift);
1875 return;
1876 }
1877
1878 /* See if the machine can do this with a store multiple insn. */
1879 #ifdef HAVE_store_multiple
1880 if (HAVE_store_multiple)
1881 {
1882 last = get_last_insn ();
1883 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1884 GEN_INT (nregs));
1885 if (pat)
1886 {
1887 emit_insn (pat);
1888 return;
1889 }
1890 else
1891 delete_insns_since (last);
1892 }
1893 #endif
1894
1895 for (i = 0; i < nregs; i++)
1896 {
1897 rtx tem = operand_subword (x, i, 1, BLKmode);
1898
1899 if (tem == 0)
1900 abort ();
1901
1902 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1903 }
1904 }
1905
1906 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1907 registers represented by a PARALLEL. SSIZE represents the total size of
1908 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1909 SRC in bits. */
1910 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1911 the balance will be in what would be the low-order memory addresses, i.e.
1912 left justified for big endian, right justified for little endian. This
1913 happens to be true for the targets currently using this support. If this
1914 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1915 would be needed. */
1916
1917 void
1918 emit_group_load (dst, orig_src, ssize, align)
1919 rtx dst, orig_src;
1920 unsigned int align;
1921 int ssize;
1922 {
1923 rtx *tmps, src;
1924 int start, i;
1925
1926 if (GET_CODE (dst) != PARALLEL)
1927 abort ();
1928
1929 /* Check for a NULL entry, used to indicate that the parameter goes
1930 both on the stack and in registers. */
1931 if (XEXP (XVECEXP (dst, 0, 0), 0))
1932 start = 0;
1933 else
1934 start = 1;
1935
1936 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1937
1938 /* If we won't be loading directly from memory, protect the real source
1939 from strange tricks we might play. */
1940 src = orig_src;
1941 if (GET_CODE (src) != MEM)
1942 {
1943 if (GET_MODE (src) == VOIDmode)
1944 src = gen_reg_rtx (GET_MODE (dst));
1945 else
1946 src = gen_reg_rtx (GET_MODE (orig_src));
1947 emit_move_insn (src, orig_src);
1948 }
1949
1950 /* Process the pieces. */
1951 for (i = start; i < XVECLEN (dst, 0); i++)
1952 {
1953 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1954 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1955 unsigned int bytelen = GET_MODE_SIZE (mode);
1956 int shift = 0;
1957
1958 /* Handle trailing fragments that run over the size of the struct. */
1959 if (ssize >= 0 && bytepos + bytelen > ssize)
1960 {
1961 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1962 bytelen = ssize - bytepos;
1963 if (bytelen <= 0)
1964 abort ();
1965 }
1966
1967 /* Optimize the access just a bit. */
1968 if (GET_CODE (src) == MEM
1969 && align >= GET_MODE_ALIGNMENT (mode)
1970 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1971 && bytelen == GET_MODE_SIZE (mode))
1972 {
1973 tmps[i] = gen_reg_rtx (mode);
1974 emit_move_insn (tmps[i],
1975 change_address (src, mode,
1976 plus_constant (XEXP (src, 0),
1977 bytepos)));
1978 }
1979 else if (GET_CODE (src) == CONCAT)
1980 {
1981 if (bytepos == 0
1982 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1983 tmps[i] = XEXP (src, 0);
1984 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1985 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1986 tmps[i] = XEXP (src, 1);
1987 else
1988 abort ();
1989 }
1990 else
1991 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1992 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1993 mode, mode, align, ssize);
1994
1995 if (BYTES_BIG_ENDIAN && shift)
1996 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1997 tmps[i], 0, OPTAB_WIDEN);
1998 }
1999
2000 emit_queue ();
2001
2002 /* Copy the extracted pieces into the proper (probable) hard regs. */
2003 for (i = start; i < XVECLEN (dst, 0); i++)
2004 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2005 }
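
/* Illustrative usage sketch (editorial note, not compiler code): a PARALLEL
   destination describes where each piece of the block goes.  Assuming a
   16-byte source SRC_MEM known to be 64-bit aligned and two hypothetical
   8-byte hard registers 32 and 33, the following loads bytes 0-7 into reg 32
   and bytes 8-15 into reg 33:

     rtx regs = gen_rtx_PARALLEL
       (VOIDmode,
        gen_rtvec (2,
                   gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 32),
                                      const0_rtx),
                   gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 33),
                                      GEN_INT (8))));
     emit_group_load (regs, src_mem, 16, 64);

   The second operand of each EXPR_LIST is the byte offset within SRC.  */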
2006
2007 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2008 registers represented by a PARALLEL. SSIZE represents the total size of
2009 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2010
2011 void
2012 emit_group_store (orig_dst, src, ssize, align)
2013 rtx orig_dst, src;
2014 int ssize;
2015 unsigned int align;
2016 {
2017 rtx *tmps, dst;
2018 int start, i;
2019
2020 if (GET_CODE (src) != PARALLEL)
2021 abort ();
2022
2023 /* Check for a NULL entry, used to indicate that the parameter goes
2024 both on the stack and in registers. */
2025 if (XEXP (XVECEXP (src, 0, 0), 0))
2026 start = 0;
2027 else
2028 start = 1;
2029
2030 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2031
2032 /* Copy the (probable) hard regs into pseudos. */
2033 for (i = start; i < XVECLEN (src, 0); i++)
2034 {
2035 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2036 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2037 emit_move_insn (tmps[i], reg);
2038 }
2039 emit_queue ();
2040
2041 /* If we won't be storing directly into memory, protect the real destination
2042 from strange tricks we might play. */
2043 dst = orig_dst;
2044 if (GET_CODE (dst) == PARALLEL)
2045 {
2046 rtx temp;
2047
2048 /* We can get a PARALLEL dst if there is a conditional expression in
2049 a return statement. In that case, the dst and src are the same,
2050 so no action is necessary. */
2051 if (rtx_equal_p (dst, src))
2052 return;
2053
2054 /* It is unclear if we can ever reach here, but we may as well handle
2055 it. Allocate a temporary, and split this into a store/load to/from
2056 the temporary. */
2057
2058 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2059 emit_group_store (temp, src, ssize, align);
2060 emit_group_load (dst, temp, ssize, align);
2061 return;
2062 }
2063 else if (GET_CODE (dst) != MEM)
2064 {
2065 dst = gen_reg_rtx (GET_MODE (orig_dst));
2066 /* Make life a bit easier for combine. */
2067 emit_move_insn (dst, const0_rtx);
2068 }
2069 else if (! MEM_IN_STRUCT_P (dst))
2070 {
2071 /* store_bit_field requires that memory operations have
2072 mem_in_struct_p set; we might not. */
2073
2074 dst = copy_rtx (orig_dst);
2075 MEM_SET_IN_STRUCT_P (dst, 1);
2076 }
2077
2078 /* Process the pieces. */
2079 for (i = start; i < XVECLEN (src, 0); i++)
2080 {
2081 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2082 enum machine_mode mode = GET_MODE (tmps[i]);
2083 unsigned int bytelen = GET_MODE_SIZE (mode);
2084
2085 /* Handle trailing fragments that run over the size of the struct. */
2086 if (ssize >= 0 && bytepos + bytelen > ssize)
2087 {
2088 if (BYTES_BIG_ENDIAN)
2089 {
2090 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2091 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2092 tmps[i], 0, OPTAB_WIDEN);
2093 }
2094 bytelen = ssize - bytepos;
2095 }
2096
2097 /* Optimize the access just a bit. */
2098 if (GET_CODE (dst) == MEM
2099 && align >= GET_MODE_ALIGNMENT (mode)
2100 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2101 && bytelen == GET_MODE_SIZE (mode))
2102 emit_move_insn (change_address (dst, mode,
2103 plus_constant (XEXP (dst, 0),
2104 bytepos)),
2105 tmps[i]);
2106 else
2107 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2108 mode, tmps[i], align, ssize);
2109 }
2110
2111 emit_queue ();
2112
2113 /* Copy from the pseudo into the (probable) hard reg. */
2114 if (GET_CODE (dst) == REG)
2115 emit_move_insn (orig_dst, dst);
2116 }
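
/* Illustrative usage sketch (editorial note, not compiler code): the inverse
   of the emit_group_load example above stores the same PARALLEL REGS back
   into a 16-byte memory block DST_MEM known to be 64-bit aligned:

     emit_group_store (dst_mem, regs, 16, 64);  */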
2117
2118 /* Generate code to copy a BLKmode object of TYPE out of a
2119 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2120 is null, a stack temporary is created. TGTBLK is returned.
2121
2122 The primary purpose of this routine is to handle functions
2123 that return BLKmode structures in registers. Some machines
2124 (the PA for example) want to return all small structures
2125 in registers regardless of the structure's alignment. */
2126
2127 rtx
2128 copy_blkmode_from_reg (tgtblk, srcreg, type)
2129 rtx tgtblk;
2130 rtx srcreg;
2131 tree type;
2132 {
2133 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2134 rtx src = NULL, dst = NULL;
2135 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2136 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2137
2138 if (tgtblk == 0)
2139 {
2140 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2141 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2142 preserve_temp_slots (tgtblk);
2143 }
2144
2145 /* This code assumes srcreg is at least a full word. If it isn't,
2146 copy it into a new pseudo which is a full word. */
2147 if (GET_MODE (srcreg) != BLKmode
2148 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2149 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2150
2151 /* Structures whose size is not a multiple of a word are aligned
2152 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2153 machine, this means we must skip the empty high order bytes when
2154 calculating the bit offset. */
2155 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2156 big_endian_correction
2157 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2158
2159 /* Copy the structure BITSIZE bits at a time.
2160
2161 We could probably emit more efficient code for machines which do not use
2162 strict alignment, but it doesn't seem worth the effort at the current
2163 time. */
2164 for (bitpos = 0, xbitpos = big_endian_correction;
2165 bitpos < bytes * BITS_PER_UNIT;
2166 bitpos += bitsize, xbitpos += bitsize)
2167 {
2168 /* We need a new source operand each time xbitpos is on a
2169 word boundary and when xbitpos == big_endian_correction
2170 (the first time through). */
2171 if (xbitpos % BITS_PER_WORD == 0
2172 || xbitpos == big_endian_correction)
2173 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2174
2175 /* We need a new destination operand each time bitpos is on
2176 a word boundary. */
2177 if (bitpos % BITS_PER_WORD == 0)
2178 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2179
2180 /* Use xbitpos for the source extraction (right justified) and
2181 bitpos for the destination store (left justified). */
2182 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2183 extract_bit_field (src, bitsize,
2184 xbitpos % BITS_PER_WORD, 1,
2185 NULL_RTX, word_mode, word_mode,
2186 bitsize, BITS_PER_WORD),
2187 bitsize, BITS_PER_WORD);
2188 }
2189
2190 return tgtblk;
2191 }
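
/* Illustrative usage sketch (editorial note, not compiler code): assuming
   RETREG is the hard register (or register group) in which a small BLKmode
   structure of tree type TYPE is returned, the caller can materialize it in
   memory with

     rtx blk = copy_blkmode_from_reg (NULL_RTX, retreg, type);

   Passing a null TGTBLK, as here, makes the function allocate and return a
   stack temporary of the right size.  */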
2192
2193 /* Add a USE expression for REG to the (possibly empty) list pointed
2194 to by CALL_FUSAGE. REG must denote a hard register. */
2195
2196 void
2197 use_reg (call_fusage, reg)
2198 rtx *call_fusage, reg;
2199 {
2200 if (GET_CODE (reg) != REG
2201 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2202 abort ();
2203
2204 *call_fusage
2205 = gen_rtx_EXPR_LIST (VOIDmode,
2206 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2207 }
2208
2209 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2210 starting at REGNO. All of these registers must be hard registers. */
2211
2212 void
2213 use_regs (call_fusage, regno, nregs)
2214 rtx *call_fusage;
2215 int regno;
2216 int nregs;
2217 {
2218 int i;
2219
2220 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2221 abort ();
2222
2223 for (i = 0; i < nregs; i++)
2224 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2225 }
2226
2227 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2228 PARALLEL REGS. This is for calls that pass values in multiple
2229 non-contiguous locations. The Irix 6 ABI has examples of this. */
2230
2231 void
2232 use_group_regs (call_fusage, regs)
2233 rtx *call_fusage;
2234 rtx regs;
2235 {
2236 int i;
2237
2238 for (i = 0; i < XVECLEN (regs, 0); i++)
2239 {
2240 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2241
2242 /* A NULL entry means the parameter goes both on the stack and in
2243 registers. This can also be a MEM for targets that pass values
2244 partially on the stack and partially in registers. */
2245 if (reg != 0 && GET_CODE (reg) == REG)
2246 use_reg (call_fusage, reg);
2247 }
2248 }
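
/* Illustrative usage sketch (editorial note, not compiler code): call
   emitters accumulate a function-usage list this way.  Assuming hard
   registers 4 and 5 (hypothetical regnos) carry arguments of a call:

     rtx call_fusage = 0;
     use_regs (&call_fusage, 4, 2);

   CALL_FUSAGE is then attached to the CALL_INSN via
   CALL_INSN_FUNCTION_USAGE so that flow analysis knows those registers are
   live at the call.  */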
2249 \f
2250 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2251 rtx with BLKmode). The caller must pass TO through protect_from_queue
2252 before calling. ALIGN is maximum alignment we can assume. */
2253
2254 static void
2255 clear_by_pieces (to, len, align)
2256 rtx to;
2257 unsigned HOST_WIDE_INT len;
2258 unsigned int align;
2259 {
2260 struct clear_by_pieces data;
2261 rtx to_addr = XEXP (to, 0);
2262 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2263 enum machine_mode mode = VOIDmode, tmode;
2264 enum insn_code icode;
2265
2266 data.offset = 0;
2267 data.to_addr = to_addr;
2268 data.to = to;
2269 data.autinc_to
2270 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2271 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2272
2273 data.explicit_inc_to = 0;
2274 data.reverse
2275 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2276 if (data.reverse)
2277 data.offset = len;
2278 data.len = len;
2279
2280 /* If clearing requires more than two move insns,
2281 copy addresses to registers (to make displacements shorter)
2282 and use post-increment if available. */
2283 if (!data.autinc_to
2284 && move_by_pieces_ninsns (len, align) > 2)
2285 {
2286 /* Determine the main mode we'll be using. */
2287 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2288 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2289 if (GET_MODE_SIZE (tmode) < max_size)
2290 mode = tmode;
2291
2292 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2293 {
2294 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2295 data.autinc_to = 1;
2296 data.explicit_inc_to = -1;
2297 }
2298
2299 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
2300 && ! data.autinc_to)
2301 {
2302 data.to_addr = copy_addr_to_reg (to_addr);
2303 data.autinc_to = 1;
2304 data.explicit_inc_to = 1;
2305 }
2306
2307 if ( !data.autinc_to && CONSTANT_P (to_addr))
2308 data.to_addr = copy_addr_to_reg (to_addr);
2309 }
2310
2311 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2312 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2313 align = MOVE_MAX * BITS_PER_UNIT;
2314
2315 /* First move what we can in the largest integer mode, then go to
2316 successively smaller modes. */
2317
2318 while (max_size > 1)
2319 {
2320 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2321 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2322 if (GET_MODE_SIZE (tmode) < max_size)
2323 mode = tmode;
2324
2325 if (mode == VOIDmode)
2326 break;
2327
2328 icode = mov_optab->handlers[(int) mode].insn_code;
2329 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2330 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2331
2332 max_size = GET_MODE_SIZE (mode);
2333 }
2334
2335 /* The code above should have handled everything. */
2336 if (data.len != 0)
2337 abort ();
2338 }
2339
2340 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2341 with move instructions for mode MODE. GENFUN is the gen_... function
2342 to make a move insn for that mode. DATA has all the other info. */
2343
2344 static void
2345 clear_by_pieces_1 (genfun, mode, data)
2346 rtx (*genfun) PARAMS ((rtx, ...));
2347 enum machine_mode mode;
2348 struct clear_by_pieces *data;
2349 {
2350 unsigned int size = GET_MODE_SIZE (mode);
2351 rtx to1;
2352
2353 while (data->len >= size)
2354 {
2355 if (data->reverse)
2356 data->offset -= size;
2357
2358 if (data->autinc_to)
2359 {
2360 to1 = gen_rtx_MEM (mode, data->to_addr);
2361 MEM_COPY_ATTRIBUTES (to1, data->to);
2362 }
2363 else
2364 to1 = change_address (data->to, mode,
2365 plus_constant (data->to_addr, data->offset));
2366
2367 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2368 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2369
2370 emit_insn ((*genfun) (to1, const0_rtx));
2371
2372 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2373 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2374
2375 if (! data->reverse)
2376 data->offset += size;
2377
2378 data->len -= size;
2379 }
2380 }
2381 \f
2382 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2383 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2384
2385 If we call a function that returns the length of the block, return it. */
2386
2387 rtx
2388 clear_storage (object, size, align)
2389 rtx object;
2390 rtx size;
2391 unsigned int align;
2392 {
2393 #ifdef TARGET_MEM_FUNCTIONS
2394 static tree fn;
2395 tree call_expr, arg_list;
2396 #endif
2397 rtx retval = 0;
2398
2399 if (GET_MODE (object) == BLKmode)
2400 {
2401 object = protect_from_queue (object, 1);
2402 size = protect_from_queue (size, 0);
2403
2404 if (GET_CODE (size) == CONST_INT
2405 && MOVE_BY_PIECES_P (INTVAL (size), align))
2406 clear_by_pieces (object, INTVAL (size), align);
2407 else
2408 {
2409 /* Try the most limited insn first, because there's no point
2410 including more than one in the machine description unless
2411 the more limited one has some advantage. */
2412
2413 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2414 enum machine_mode mode;
2415
2416 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2417 mode = GET_MODE_WIDER_MODE (mode))
2418 {
2419 enum insn_code code = clrstr_optab[(int) mode];
2420 insn_operand_predicate_fn pred;
2421
2422 if (code != CODE_FOR_nothing
2423 /* We don't need MODE to be narrower than
2424 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2425 the mode mask, as it is returned by the macro, it will
2426 definitely be less than the actual mode mask. */
2427 && ((GET_CODE (size) == CONST_INT
2428 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2429 <= (GET_MODE_MASK (mode) >> 1)))
2430 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2431 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2432 || (*pred) (object, BLKmode))
2433 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2434 || (*pred) (opalign, VOIDmode)))
2435 {
2436 rtx op1;
2437 rtx last = get_last_insn ();
2438 rtx pat;
2439
2440 op1 = convert_to_mode (mode, size, 1);
2441 pred = insn_data[(int) code].operand[1].predicate;
2442 if (pred != 0 && ! (*pred) (op1, mode))
2443 op1 = copy_to_mode_reg (mode, op1);
2444
2445 pat = GEN_FCN ((int) code) (object, op1, opalign);
2446 if (pat)
2447 {
2448 emit_insn (pat);
2449 return 0;
2450 }
2451 else
2452 delete_insns_since (last);
2453 }
2454 }
2455
2456 /* OBJECT or SIZE may have been passed through protect_from_queue.
2457
2458 It is unsafe to save the value generated by protect_from_queue
2459 and reuse it later. Consider what happens if emit_queue is
2460 called before the return value from protect_from_queue is used.
2461
2462 Expansion of the CALL_EXPR below will call emit_queue before
2463 we are finished emitting RTL for argument setup. So if we are
2464 not careful we could get the wrong value for an argument.
2465
2466 To avoid this problem we go ahead and emit code to copy OBJECT
2467 and SIZE into new pseudos. We can then place those new pseudos
2468 into an RTL_EXPR and use them later, even after a call to
2469 emit_queue.
2470
2471 Note this is not strictly needed for library calls since they
2472 do not call emit_queue before loading their arguments. However,
2473 we may need to have library calls call emit_queue in the future
2474 since failing to do so could cause problems for targets which
2475 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2476 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2477
2478 #ifdef TARGET_MEM_FUNCTIONS
2479 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2480 #else
2481 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2482 TREE_UNSIGNED (integer_type_node));
2483 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2484 #endif
2485
2486 #ifdef TARGET_MEM_FUNCTIONS
2487 /* It is incorrect to use the libcall calling conventions to call
2488 memset in this context.
2489
2490 This could be a user call to memset and the user may wish to
2491 examine the return value from memset.
2492
2493 For targets where libcalls and normal calls have different
2494 conventions for returning pointers, we could end up generating
2495 incorrect code.
2496
2497 So instead of using a libcall sequence we build up a suitable
2498 CALL_EXPR and expand the call in the normal fashion. */
2499 if (fn == NULL_TREE)
2500 {
2501 tree fntype;
2502
2503 /* This was copied from except.c; I don't know if all this is
2504 necessary in this context or not. */
2505 fn = get_identifier ("memset");
2506 push_obstacks_nochange ();
2507 end_temporary_allocation ();
2508 fntype = build_pointer_type (void_type_node);
2509 fntype = build_function_type (fntype, NULL_TREE);
2510 fn = build_decl (FUNCTION_DECL, fn, fntype);
2511 ggc_add_tree_root (&fn, 1);
2512 DECL_EXTERNAL (fn) = 1;
2513 TREE_PUBLIC (fn) = 1;
2514 DECL_ARTIFICIAL (fn) = 1;
2515 make_decl_rtl (fn, NULL_PTR, 1);
2516 assemble_external (fn);
2517 pop_obstacks ();
2518 }
2519
2520 /* We need to make an argument list for the function call.
2521
2522 memset has three arguments: the first is a void * address, the
2523 second an integer with the initialization value, and the last is a
2524 size_t byte count for the copy. */
2525 arg_list
2526 = build_tree_list (NULL_TREE,
2527 make_tree (build_pointer_type (void_type_node),
2528 object));
2529 TREE_CHAIN (arg_list)
2530 = build_tree_list (NULL_TREE,
2531 make_tree (integer_type_node, const0_rtx));
2532 TREE_CHAIN (TREE_CHAIN (arg_list))
2533 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2534 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2535
2536 /* Now we have to build up the CALL_EXPR itself. */
2537 call_expr = build1 (ADDR_EXPR,
2538 build_pointer_type (TREE_TYPE (fn)), fn);
2539 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2540 call_expr, arg_list, NULL_TREE);
2541 TREE_SIDE_EFFECTS (call_expr) = 1;
2542
2543 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2544 #else
2545 emit_library_call (bzero_libfunc, 0,
2546 VOIDmode, 2, object, Pmode, size,
2547 TYPE_MODE (integer_type_node));
2548 #endif
2549 }
2550 }
2551 else
2552 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2553
2554 return retval;
2555 }
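
/* Illustrative usage sketch (editorial note, not compiler code): zeroing a
   32-byte BLKmode stack temporary that is known to be word aligned could
   look like

     rtx blk = assign_stack_temp (BLKmode, 32, 0);
     clear_storage (blk, GEN_INT (32), BITS_PER_WORD);

   ALIGN is given in bits here; small constant sizes go through
   clear_by_pieces, larger or variable ones through the clrstr patterns or a
   memset/bzero call as above.  */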
2556
2557 /* Generate code to copy Y into X.
2558 Both Y and X must have the same mode, except that
2559 Y can be a constant with VOIDmode.
2560 This mode cannot be BLKmode; use emit_block_move for that.
2561
2562 Return the last instruction emitted. */
2563
2564 rtx
2565 emit_move_insn (x, y)
2566 rtx x, y;
2567 {
2568 enum machine_mode mode = GET_MODE (x);
2569
2570 x = protect_from_queue (x, 1);
2571 y = protect_from_queue (y, 0);
2572
2573 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2574 abort ();
2575
2576 /* Never force constant_p_rtx to memory. */
2577 if (GET_CODE (y) == CONSTANT_P_RTX)
2578 ;
2579 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2580 y = force_const_mem (mode, y);
2581
2582 /* If X or Y are memory references, verify that their addresses are valid
2583 for the machine. */
2584 if (GET_CODE (x) == MEM
2585 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2586 && ! push_operand (x, GET_MODE (x)))
2587 || (flag_force_addr
2588 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2589 x = change_address (x, VOIDmode, XEXP (x, 0));
2590
2591 if (GET_CODE (y) == MEM
2592 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2593 || (flag_force_addr
2594 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2595 y = change_address (y, VOIDmode, XEXP (y, 0));
2596
2597 if (mode == BLKmode)
2598 abort ();
2599
2600 return emit_move_insn_1 (x, y);
2601 }
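
/* Illustrative usage sketch (editorial note, not compiler code): the typical
   pattern is to move a constant or another rtx of the same mode into a
   pseudo register:

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   Constants that the target cannot handle directly are forced into the
   constant pool above before the move is emitted.  */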
2602
2603 /* Low level part of emit_move_insn.
2604 Called just like emit_move_insn, but assumes X and Y
2605 are basically valid. */
2606
2607 rtx
2608 emit_move_insn_1 (x, y)
2609 rtx x, y;
2610 {
2611 enum machine_mode mode = GET_MODE (x);
2612 enum machine_mode submode;
2613 enum mode_class class = GET_MODE_CLASS (mode);
2614 unsigned int i;
2615
2616 if (mode >= MAX_MACHINE_MODE)
2617 abort ();
2618
2619 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2620 return
2621 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2622
2623 /* Expand complex moves by moving real part and imag part, if possible. */
2624 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2625 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2626 * BITS_PER_UNIT),
2627 (class == MODE_COMPLEX_INT
2628 ? MODE_INT : MODE_FLOAT),
2629 0))
2630 && (mov_optab->handlers[(int) submode].insn_code
2631 != CODE_FOR_nothing))
2632 {
2633 /* Don't split destination if it is a stack push. */
2634 int stack = push_operand (x, GET_MODE (x));
2635
2636 /* If this is a stack, push the highpart first, so it
2637 will be in the argument order.
2638
2639 In that case, change_address is used only to convert
2640 the mode, not to change the address. */
2641 if (stack)
2642 {
2643 /* Note that the real part always precedes the imag part in memory
2644 regardless of machine's endianness. */
2645 #ifdef STACK_GROWS_DOWNWARD
2646 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2647 (gen_rtx_MEM (submode, XEXP (x, 0)),
2648 gen_imagpart (submode, y)));
2649 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2650 (gen_rtx_MEM (submode, XEXP (x, 0)),
2651 gen_realpart (submode, y)));
2652 #else
2653 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2654 (gen_rtx_MEM (submode, XEXP (x, 0)),
2655 gen_realpart (submode, y)));
2656 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2657 (gen_rtx_MEM (submode, XEXP (x, 0)),
2658 gen_imagpart (submode, y)));
2659 #endif
2660 }
2661 else
2662 {
2663 rtx realpart_x, realpart_y;
2664 rtx imagpart_x, imagpart_y;
2665
2666 /* If this is a complex value with each part being smaller than a
2667 word, the usual calling sequence will likely pack the pieces into
2668 a single register. Unfortunately, SUBREG of hard registers only
2669 deals in terms of words, so we have a problem converting input
2670 arguments to the CONCAT of two registers that is used elsewhere
2671 for complex values. If this is before reload, we can copy it into
2672 memory and reload. FIXME, we should see about using extract and
2673 insert on integer registers, but complex short and complex char
2674 variables should be rarely used. */
2675 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2676 && (reload_in_progress | reload_completed) == 0)
2677 {
2678 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2679 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2680
2681 if (packed_dest_p || packed_src_p)
2682 {
2683 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2684 ? MODE_FLOAT : MODE_INT);
2685
2686 enum machine_mode reg_mode =
2687 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2688
2689 if (reg_mode != BLKmode)
2690 {
2691 rtx mem = assign_stack_temp (reg_mode,
2692 GET_MODE_SIZE (mode), 0);
2693
2694 rtx cmem = change_address (mem, mode, NULL_RTX);
2695
2696 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2697
2698 if (packed_dest_p)
2699 {
2700 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2701 emit_move_insn_1 (cmem, y);
2702 return emit_move_insn_1 (sreg, mem);
2703 }
2704 else
2705 {
2706 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2707 emit_move_insn_1 (mem, sreg);
2708 return emit_move_insn_1 (x, cmem);
2709 }
2710 }
2711 }
2712 }
2713
2714 realpart_x = gen_realpart (submode, x);
2715 realpart_y = gen_realpart (submode, y);
2716 imagpart_x = gen_imagpart (submode, x);
2717 imagpart_y = gen_imagpart (submode, y);
2718
2719 /* Show the output dies here. This is necessary for SUBREGs
2720 of pseudos since we cannot track their lifetimes correctly;
2721 hard regs shouldn't appear here except as return values.
2722 We never want to emit such a clobber after reload. */
2723 if (x != y
2724 && ! (reload_in_progress || reload_completed)
2725 && (GET_CODE (realpart_x) == SUBREG
2726 || GET_CODE (imagpart_x) == SUBREG))
2727 {
2728 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2729 }
2730
2731 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2732 (realpart_x, realpart_y));
2733 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2734 (imagpart_x, imagpart_y));
2735 }
2736
2737 return get_last_insn ();
2738 }
2739
2740 /* This will handle any multi-word mode that lacks a move_insn pattern.
2741 However, you will get better code if you define such patterns,
2742 even if they must turn into multiple assembler instructions. */
2743 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2744 {
2745 rtx last_insn = 0;
2746 rtx seq, inner;
2747 int need_clobber;
2748
2749 #ifdef PUSH_ROUNDING
2750
2751 /* If X is a push on the stack, do the push now and replace
2752 X with a reference to the stack pointer. */
2753 if (push_operand (x, GET_MODE (x)))
2754 {
2755 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2756 x = change_address (x, VOIDmode, stack_pointer_rtx);
2757 }
2758 #endif
2759
2760 /* If we are in reload, see if either operand is a MEM whose address
2761 is scheduled for replacement. */
2762 if (reload_in_progress && GET_CODE (x) == MEM
2763 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2764 {
2765 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2766
2767 MEM_COPY_ATTRIBUTES (new, x);
2768 x = new;
2769 }
2770 if (reload_in_progress && GET_CODE (y) == MEM
2771 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2772 {
2773 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2774
2775 MEM_COPY_ATTRIBUTES (new, y);
2776 y = new;
2777 }
2778
2779 start_sequence ();
2780
2781 need_clobber = 0;
2782 for (i = 0;
2783 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2784 i++)
2785 {
2786 rtx xpart = operand_subword (x, i, 1, mode);
2787 rtx ypart = operand_subword (y, i, 1, mode);
2788
2789 /* If we can't get a part of Y, put Y into memory if it is a
2790 constant. Otherwise, force it into a register. If we still
2791 can't get a part of Y, abort. */
2792 if (ypart == 0 && CONSTANT_P (y))
2793 {
2794 y = force_const_mem (mode, y);
2795 ypart = operand_subword (y, i, 1, mode);
2796 }
2797 else if (ypart == 0)
2798 ypart = operand_subword_force (y, i, mode);
2799
2800 if (xpart == 0 || ypart == 0)
2801 abort ();
2802
2803 need_clobber |= (GET_CODE (xpart) == SUBREG);
2804
2805 last_insn = emit_move_insn (xpart, ypart);
2806 }
2807
2808 seq = gen_sequence ();
2809 end_sequence ();
2810
2811 /* Show the output dies here. This is necessary for SUBREGs
2812 of pseudos since we cannot track their lifetimes correctly;
2813 hard regs shouldn't appear here except as return values.
2814 We never want to emit such a clobber after reload. */
2815 if (x != y
2816 && ! (reload_in_progress || reload_completed)
2817 && need_clobber != 0)
2818 {
2819 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2820 }
2821
2822 emit_insn (seq);
2823
2824 return last_insn;
2825 }
2826 else
2827 abort ();
2828 }
2829 \f
2830 /* Pushing data onto the stack. */
2831
2832 /* Push a block of length SIZE (perhaps variable)
2833 and return an rtx to address the beginning of the block.
2834 Note that it is not possible for the value returned to be a QUEUED.
2835 The value may be virtual_outgoing_args_rtx.
2836
2837 EXTRA is the number of bytes of padding to push in addition to SIZE.
2838 BELOW nonzero means this padding comes at low addresses;
2839 otherwise, the padding comes at high addresses. */
2840
2841 rtx
2842 push_block (size, extra, below)
2843 rtx size;
2844 int extra, below;
2845 {
2846 register rtx temp;
2847
2848 size = convert_modes (Pmode, ptr_mode, size, 1);
2849 if (CONSTANT_P (size))
2850 anti_adjust_stack (plus_constant (size, extra));
2851 else if (GET_CODE (size) == REG && extra == 0)
2852 anti_adjust_stack (size);
2853 else
2854 {
2855 temp = copy_to_mode_reg (Pmode, size);
2856 if (extra != 0)
2857 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2858 temp, 0, OPTAB_LIB_WIDEN);
2859 anti_adjust_stack (temp);
2860 }
2861
2862 #ifndef STACK_GROWS_DOWNWARD
2863 #ifdef ARGS_GROW_DOWNWARD
2864 if (!ACCUMULATE_OUTGOING_ARGS)
2865 #else
2866 if (0)
2867 #endif
2868 #else
2869 if (1)
2870 #endif
2871 {
2872 /* Return the lowest stack address when STACK or ARGS grow downward and
2873 we are not accumulating outgoing arguments (the c4x port uses such
2874 conventions). */
2875 temp = virtual_outgoing_args_rtx;
2876 if (extra != 0 && below)
2877 temp = plus_constant (temp, extra);
2878 }
2879 else
2880 {
2881 if (GET_CODE (size) == CONST_INT)
2882 temp = plus_constant (virtual_outgoing_args_rtx,
2883 -INTVAL (size) - (below ? 0 : extra));
2884 else if (extra != 0 && !below)
2885 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2886 negate_rtx (Pmode, plus_constant (size, extra)));
2887 else
2888 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2889 negate_rtx (Pmode, size));
2890 }
2891
2892 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2893 }
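
/* Illustrative usage sketch (editorial note, not compiler code): to reserve
   16 bytes of outgoing-argument space and obtain its address, a caller could
   write

     rtx addr = push_block (GEN_INT (16), 0, 0);

   The stack pointer is adjusted immediately, and ADDR addresses the start of
   the new block whichever way the stack and the argument area grow.  */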
2894
2895 rtx
2896 gen_push_operand ()
2897 {
2898 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2899 }
2900
2901 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2902 block of SIZE bytes. */
2903
2904 static rtx
2905 get_push_address (size)
2906 int size;
2907 {
2908 register rtx temp;
2909
2910 if (STACK_PUSH_CODE == POST_DEC)
2911 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2912 else if (STACK_PUSH_CODE == POST_INC)
2913 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2914 else
2915 temp = stack_pointer_rtx;
2916
2917 return copy_to_reg (temp);
2918 }
2919
2920 /* Generate code to push X onto the stack, assuming it has mode MODE and
2921 type TYPE.
2922 MODE is redundant except when X is a CONST_INT (since they don't
2923 carry mode info).
2924 SIZE is an rtx for the size of data to be copied (in bytes),
2925 needed only if X is BLKmode.
2926
2927 ALIGN is maximum alignment we can assume.
2928
2929 If PARTIAL and REG are both nonzero, then copy that many of the first
2930 words of X into registers starting with REG, and push the rest of X.
2931 The amount of space pushed is decreased by PARTIAL words,
2932 rounded *down* to a multiple of PARM_BOUNDARY.
2933 REG must be a hard register in this case.
2934 If REG is zero but PARTIAL is not, take all other actions for an
2935 argument partially in registers, but do not actually load any
2936 registers.
2937
2938 EXTRA is the amount in bytes of extra space to leave next to this arg.
2939 This is ignored if an argument block has already been allocated.
2940
2941 On a machine that lacks real push insns, ARGS_ADDR is the address of
2942 the bottom of the argument block for this call. We use indexing off there
2943 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2944 argument block has not been preallocated.
2945
2946 ARGS_SO_FAR is the size of args previously pushed for this call.
2947
2948 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2949 for arguments passed in registers. If nonzero, it will be the number
2950 of bytes required. */
2951
2952 void
2953 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2954 args_addr, args_so_far, reg_parm_stack_space,
2955 alignment_pad)
2956 register rtx x;
2957 enum machine_mode mode;
2958 tree type;
2959 rtx size;
2960 unsigned int align;
2961 int partial;
2962 rtx reg;
2963 int extra;
2964 rtx args_addr;
2965 rtx args_so_far;
2966 int reg_parm_stack_space;
2967 rtx alignment_pad;
2968 {
2969 rtx xinner;
2970 enum direction stack_direction
2971 #ifdef STACK_GROWS_DOWNWARD
2972 = downward;
2973 #else
2974 = upward;
2975 #endif
2976
2977 /* Decide where to pad the argument: `downward' for below,
2978 `upward' for above, or `none' for don't pad it.
2979 Default is below for small data on big-endian machines; else above. */
2980 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2981
2982 /* Invert direction if stack is post-update. */
2983 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2984 if (where_pad != none)
2985 where_pad = (where_pad == downward ? upward : downward);
2986
2987 xinner = x = protect_from_queue (x, 0);
2988
2989 if (mode == BLKmode)
2990 {
2991 /* Copy a block into the stack, entirely or partially. */
2992
2993 register rtx temp;
2994 int used = partial * UNITS_PER_WORD;
2995 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2996 int skip;
2997
2998 if (size == 0)
2999 abort ();
3000
3001 used -= offset;
3002
3003 /* USED is now the # of bytes we need not copy to the stack
3004 because registers will take care of them. */
3005
3006 if (partial != 0)
3007 xinner = change_address (xinner, BLKmode,
3008 plus_constant (XEXP (xinner, 0), used));
3009
3010 /* If the partial register-part of the arg counts in its stack size,
3011 skip the part of stack space corresponding to the registers.
3012 Otherwise, start copying to the beginning of the stack space,
3013 by setting SKIP to 0. */
3014 skip = (reg_parm_stack_space == 0) ? 0 : used;
3015
3016 #ifdef PUSH_ROUNDING
3017 /* Do it with several push insns if that doesn't take lots of insns
3018 and if there is no difficulty with push insns that skip bytes
3019 on the stack for alignment purposes. */
3020 if (args_addr == 0
3021 && PUSH_ARGS
3022 && GET_CODE (size) == CONST_INT
3023 && skip == 0
3024 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3025 /* Here we avoid the case of a structure whose weak alignment
3026 forces many pushes of a small amount of data,
3027 and such small pushes do rounding that causes trouble. */
3028 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3029 || align >= BIGGEST_ALIGNMENT
3030 || PUSH_ROUNDING (align) == align)
3031 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3032 {
3033 /* Push padding now if padding above and stack grows down,
3034 or if padding below and stack grows up.
3035 But if space already allocated, this has already been done. */
3036 if (extra && args_addr == 0
3037 && where_pad != none && where_pad != stack_direction)
3038 anti_adjust_stack (GEN_INT (extra));
3039
3040 stack_pointer_delta += INTVAL (size) - used;
3041 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3042 INTVAL (size) - used, align);
3043
3044 if (current_function_check_memory_usage && ! in_check_memory_usage)
3045 {
3046 rtx temp;
3047
3048 in_check_memory_usage = 1;
3049 temp = get_push_address (INTVAL (size) - used);
3050 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3051 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3052 temp, Pmode,
3053 XEXP (xinner, 0), Pmode,
3054 GEN_INT (INTVAL (size) - used),
3055 TYPE_MODE (sizetype));
3056 else
3057 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3058 temp, Pmode,
3059 GEN_INT (INTVAL (size) - used),
3060 TYPE_MODE (sizetype),
3061 GEN_INT (MEMORY_USE_RW),
3062 TYPE_MODE (integer_type_node));
3063 in_check_memory_usage = 0;
3064 }
3065 }
3066 else
3067 #endif /* PUSH_ROUNDING */
3068 {
3069 rtx target;
3070
3071 /* Otherwise make space on the stack and copy the data
3072 to the address of that space. */
3073
3074 /* Deduct words put into registers from the size we must copy. */
3075 if (partial != 0)
3076 {
3077 if (GET_CODE (size) == CONST_INT)
3078 size = GEN_INT (INTVAL (size) - used);
3079 else
3080 size = expand_binop (GET_MODE (size), sub_optab, size,
3081 GEN_INT (used), NULL_RTX, 0,
3082 OPTAB_LIB_WIDEN);
3083 }
3084
3085 /* Get the address of the stack space.
3086 In this case, we do not deal with EXTRA separately.
3087 A single stack adjust will do. */
3088 if (! args_addr)
3089 {
3090 temp = push_block (size, extra, where_pad == downward);
3091 extra = 0;
3092 }
3093 else if (GET_CODE (args_so_far) == CONST_INT)
3094 temp = memory_address (BLKmode,
3095 plus_constant (args_addr,
3096 skip + INTVAL (args_so_far)));
3097 else
3098 temp = memory_address (BLKmode,
3099 plus_constant (gen_rtx_PLUS (Pmode,
3100 args_addr,
3101 args_so_far),
3102 skip));
3103 if (current_function_check_memory_usage && ! in_check_memory_usage)
3104 {
3105 in_check_memory_usage = 1;
3106 target = copy_to_reg (temp);
3107 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3108 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3109 target, Pmode,
3110 XEXP (xinner, 0), Pmode,
3111 size, TYPE_MODE (sizetype));
3112 else
3113 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3114 target, Pmode,
3115 size, TYPE_MODE (sizetype),
3116 GEN_INT (MEMORY_USE_RW),
3117 TYPE_MODE (integer_type_node));
3118 in_check_memory_usage = 0;
3119 }
3120
3121 target = gen_rtx_MEM (BLKmode, temp);
3122
3123 if (type != 0)
3124 {
3125 set_mem_attributes (target, type, 1);
3126 /* Function incoming arguments may overlap with sibling call
3127 outgoing arguments and we cannot allow reordering of reads
3128 from function arguments with stores to outgoing arguments
3129 of sibling calls. */
3130 MEM_ALIAS_SET (target) = 0;
3131 }
3132
3133 /* TEMP is the address of the block. Copy the data there. */
3134 if (GET_CODE (size) == CONST_INT
3135 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3136 {
3137 move_by_pieces (target, xinner, INTVAL (size), align);
3138 goto ret;
3139 }
3140 else
3141 {
3142 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3143 enum machine_mode mode;
3144
3145 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3146 mode != VOIDmode;
3147 mode = GET_MODE_WIDER_MODE (mode))
3148 {
3149 enum insn_code code = movstr_optab[(int) mode];
3150 insn_operand_predicate_fn pred;
3151
3152 if (code != CODE_FOR_nothing
3153 && ((GET_CODE (size) == CONST_INT
3154 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3155 <= (GET_MODE_MASK (mode) >> 1)))
3156 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3157 && (!(pred = insn_data[(int) code].operand[0].predicate)
3158 || ((*pred) (target, BLKmode)))
3159 && (!(pred = insn_data[(int) code].operand[1].predicate)
3160 || ((*pred) (xinner, BLKmode)))
3161 && (!(pred = insn_data[(int) code].operand[3].predicate)
3162 || ((*pred) (opalign, VOIDmode))))
3163 {
3164 rtx op2 = convert_to_mode (mode, size, 1);
3165 rtx last = get_last_insn ();
3166 rtx pat;
3167
3168 pred = insn_data[(int) code].operand[2].predicate;
3169 if (pred != 0 && ! (*pred) (op2, mode))
3170 op2 = copy_to_mode_reg (mode, op2);
3171
3172 pat = GEN_FCN ((int) code) (target, xinner,
3173 op2, opalign);
3174 if (pat)
3175 {
3176 emit_insn (pat);
3177 goto ret;
3178 }
3179 else
3180 delete_insns_since (last);
3181 }
3182 }
3183 }
3184
3185 if (!ACCUMULATE_OUTGOING_ARGS)
3186 {
3187 /* If the source is referenced relative to the stack pointer,
3188 copy it to another register to stabilize it. We do not need
3189 to do this if we know that we won't be changing sp. */
3190
3191 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3192 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3193 temp = copy_to_reg (temp);
3194 }
3195
3196 /* Make inhibit_defer_pop nonzero around the library call
3197 to force it to pop the bcopy-arguments right away. */
3198 NO_DEFER_POP;
3199 #ifdef TARGET_MEM_FUNCTIONS
3200 emit_library_call (memcpy_libfunc, 0,
3201 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3202 convert_to_mode (TYPE_MODE (sizetype),
3203 size, TREE_UNSIGNED (sizetype)),
3204 TYPE_MODE (sizetype));
3205 #else
3206 emit_library_call (bcopy_libfunc, 0,
3207 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3208 convert_to_mode (TYPE_MODE (integer_type_node),
3209 size,
3210 TREE_UNSIGNED (integer_type_node)),
3211 TYPE_MODE (integer_type_node));
3212 #endif
3213 OK_DEFER_POP;
3214 }
3215 }
3216 else if (partial > 0)
3217 {
3218 /* Scalar partly in registers. */
3219
3220 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3221 int i;
3222 int not_stack;
3223 /* # words of start of argument
3224 that we must make space for but need not store. */
3225 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3226 int args_offset = INTVAL (args_so_far);
3227 int skip;
3228
3229 /* Push padding now if padding above and stack grows down,
3230 or if padding below and stack grows up.
3231 But if space already allocated, this has already been done. */
3232 if (extra && args_addr == 0
3233 && where_pad != none && where_pad != stack_direction)
3234 anti_adjust_stack (GEN_INT (extra));
3235
3236 /* If we make space by pushing it, we might as well push
3237 the real data. Otherwise, we can leave OFFSET nonzero
3238 and leave the space uninitialized. */
3239 if (args_addr == 0)
3240 offset = 0;
3241
3242 /* Now NOT_STACK gets the number of words that we don't need to
3243 allocate on the stack. */
3244 not_stack = partial - offset;
3245
3246 /* If the partial register-part of the arg counts in its stack size,
3247 skip the part of stack space corresponding to the registers.
3248 Otherwise, start copying to the beginning of the stack space,
3249 by setting SKIP to 0. */
3250 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3251
3252 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3253 x = validize_mem (force_const_mem (mode, x));
3254
3255 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3256 SUBREGs of such registers are not allowed. */
3257 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3258 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3259 x = copy_to_reg (x);
3260
3261 /* Loop over all the words allocated on the stack for this arg. */
3262 /* We can do it by words, because any scalar bigger than a word
3263 has a size that is a multiple of a word. */
3264 #ifndef PUSH_ARGS_REVERSED
3265 for (i = not_stack; i < size; i++)
3266 #else
3267 for (i = size - 1; i >= not_stack; i--)
3268 #endif
3269 if (i >= not_stack + offset)
3270 emit_push_insn (operand_subword_force (x, i, mode),
3271 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3272 0, args_addr,
3273 GEN_INT (args_offset + ((i - not_stack + skip)
3274 * UNITS_PER_WORD)),
3275 reg_parm_stack_space, alignment_pad);
3276 }
3277 else
3278 {
3279 rtx addr;
3280 rtx target = NULL_RTX;
3281 rtx dest;
3282
3283 /* Push padding now if padding above and stack grows down,
3284 or if padding below and stack grows up.
3285 But if space already allocated, this has already been done. */
3286 if (extra && args_addr == 0
3287 && where_pad != none && where_pad != stack_direction)
3288 anti_adjust_stack (GEN_INT (extra));
3289
3290 #ifdef PUSH_ROUNDING
3291 if (args_addr == 0 && PUSH_ARGS)
3292 {
3293 addr = gen_push_operand ();
3294 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3295 }
3296 else
3297 #endif
3298 {
3299 if (GET_CODE (args_so_far) == CONST_INT)
3300 addr
3301 = memory_address (mode,
3302 plus_constant (args_addr,
3303 INTVAL (args_so_far)));
3304 else
3305 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3306 args_so_far));
3307 target = addr;
3308 }
3309
3310 dest = gen_rtx_MEM (mode, addr);
3311 if (type != 0)
3312 {
3313 set_mem_attributes (dest, type, 1);
3314 /* Function incoming arguments may overlap with sibling call
3315 outgoing arguments and we cannot allow reordering of reads
3316 from function arguments with stores to outgoing arguments
3317 of sibling calls. */
3318 MEM_ALIAS_SET (dest) = 0;
3319 }
3320
3321 emit_move_insn (dest, x);
3322
3323 if (current_function_check_memory_usage && ! in_check_memory_usage)
3324 {
3325 in_check_memory_usage = 1;
3326 if (target == 0)
3327 target = get_push_address (GET_MODE_SIZE (mode));
3328
3329 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3330 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3331 target, Pmode,
3332 XEXP (x, 0), Pmode,
3333 GEN_INT (GET_MODE_SIZE (mode)),
3334 TYPE_MODE (sizetype));
3335 else
3336 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3337 target, Pmode,
3338 GEN_INT (GET_MODE_SIZE (mode)),
3339 TYPE_MODE (sizetype),
3340 GEN_INT (MEMORY_USE_RW),
3341 TYPE_MODE (integer_type_node));
3342 in_check_memory_usage = 0;
3343 }
3344 }
3345
3346 ret:
3347 /* If part should go in registers, copy that part
3348 into the appropriate registers. Do this now, at the end,
3349 since mem-to-mem copies above may do function calls. */
3350 if (partial > 0 && reg != 0)
3351 {
3352 /* Handle calls that pass values in multiple non-contiguous locations.
3353 The Irix 6 ABI has examples of this. */
3354 if (GET_CODE (reg) == PARALLEL)
3355 emit_group_load (reg, x, -1, align); /* ??? size? */
3356 else
3357 move_block_to_reg (REGNO (reg), x, partial, mode);
3358 }
3359
3360 if (extra && args_addr == 0 && where_pad == stack_direction)
3361 anti_adjust_stack (GEN_INT (extra));
3362
3363 if (alignment_pad && args_addr == 0)
3364 anti_adjust_stack (alignment_pad);
3365 }
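
/* Illustrative usage sketch (editorial note, not compiler code): pushing a
   32-bit constant argument on a machine with push insns, with no partial
   register passing and no preallocated argument block, might look like

     emit_push_insn (GEN_INT (42), SImode, integer_type_node, NULL_RTX,
                     GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX, 0,
                     NULL_RTX, const0_rtx, 0, NULL_RTX);

   Real callers in calls.c compute PARTIAL, REG, EXTRA and the stack offsets
   from the target's argument-passing macros; the values here are purely
   illustrative.  */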
3366 \f
3367 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3368 operations. */
3369
3370 static rtx
3371 get_subtarget (x)
3372 rtx x;
3373 {
3374 return ((x == 0
3375 /* Only registers can be subtargets. */
3376 || GET_CODE (x) != REG
3377 /* If the register is readonly, it can't be set more than once. */
3378 || RTX_UNCHANGING_P (x)
3379 /* Don't use hard regs to avoid extending their life. */
3380 || REGNO (x) < FIRST_PSEUDO_REGISTER
3381 /* Avoid subtargets inside loops,
3382 since they hide some invariant expressions. */
3383 || preserve_subexpressions_p ())
3384 ? 0 : x);
3385 }
3386
3387 /* Expand an assignment that stores the value of FROM into TO.
3388 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3389 (This may contain a QUEUED rtx;
3390 if the value is constant, this rtx is a constant.)
3391 Otherwise, the returned value is NULL_RTX.
3392
3393 SUGGEST_REG is no longer actually used.
3394 It used to mean, copy the value through a register
3395 and return that register, if that is possible.
3396 We now use WANT_VALUE to decide whether to do this. */
3397
3398 rtx
3399 expand_assignment (to, from, want_value, suggest_reg)
3400 tree to, from;
3401 int want_value;
3402 int suggest_reg ATTRIBUTE_UNUSED;
3403 {
3404 register rtx to_rtx = 0;
3405 rtx result;
3406
3407 /* Don't crash if the lhs of the assignment was erroneous. */
3408
3409 if (TREE_CODE (to) == ERROR_MARK)
3410 {
3411 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3412 return want_value ? result : NULL_RTX;
3413 }
3414
3415 /* Assignment of a structure component needs special treatment
3416 if the structure component's rtx is not simply a MEM.
3417 Assignment of an array element at a constant index, and assignment of
3418 an array element in an unaligned packed structure field, has the same
3419 problem. */
3420
3421 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3422 || TREE_CODE (to) == ARRAY_REF)
3423 {
3424 enum machine_mode mode1;
3425 HOST_WIDE_INT bitsize, bitpos;
3426 tree offset;
3427 int unsignedp;
3428 int volatilep = 0;
3429 tree tem;
3430 unsigned int alignment;
3431
3432 push_temp_slots ();
3433 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3434 &unsignedp, &volatilep, &alignment);
3435
3436 /* If we are going to use store_bit_field and extract_bit_field,
3437 make sure to_rtx will be safe for multiple use. */
3438
3439 if (mode1 == VOIDmode && want_value)
3440 tem = stabilize_reference (tem);
3441
3442 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3443 if (offset != 0)
3444 {
3445 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3446
3447 if (GET_CODE (to_rtx) != MEM)
3448 abort ();
3449
3450 if (GET_MODE (offset_rtx) != ptr_mode)
3451 {
3452 #ifdef POINTERS_EXTEND_UNSIGNED
3453 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3454 #else
3455 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3456 #endif
3457 }
3458
3459 /* A constant address in TO_RTX can have VOIDmode; we must not try
3460 to call force_reg for that case, so avoid it. */
3461 if (GET_CODE (to_rtx) == MEM
3462 && GET_MODE (to_rtx) == BLKmode
3463 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3464 && bitsize
3465 && (bitpos % bitsize) == 0
3466 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3467 && alignment == GET_MODE_ALIGNMENT (mode1))
3468 {
3469 rtx temp = change_address (to_rtx, mode1,
3470 plus_constant (XEXP (to_rtx, 0),
3471 (bitpos /
3472 BITS_PER_UNIT)));
3473 if (GET_CODE (XEXP (temp, 0)) == REG)
3474 to_rtx = temp;
3475 else
3476 to_rtx = change_address (to_rtx, mode1,
3477 force_reg (GET_MODE (XEXP (temp, 0)),
3478 XEXP (temp, 0)));
3479 bitpos = 0;
3480 }
3481
3482 to_rtx = change_address (to_rtx, VOIDmode,
3483 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3484 force_reg (ptr_mode,
3485 offset_rtx)));
3486 }
3487
3488 if (volatilep)
3489 {
3490 if (GET_CODE (to_rtx) == MEM)
3491 {
3492 /* When the offset is zero, to_rtx is the address of the
3493 structure we are storing into, and hence may be shared.
3494 We must make a new MEM before setting the volatile bit. */
3495 if (offset == 0)
3496 to_rtx = copy_rtx (to_rtx);
3497
3498 MEM_VOLATILE_P (to_rtx) = 1;
3499 }
3500 #if 0 /* This was turned off because, when a field is volatile
3501 in an object which is not volatile, the object may be in a register,
3502 and then we would abort over here. */
3503 else
3504 abort ();
3505 #endif
3506 }
3507
3508 if (TREE_CODE (to) == COMPONENT_REF
3509 && TREE_READONLY (TREE_OPERAND (to, 1)))
3510 {
3511 if (offset == 0)
3512 to_rtx = copy_rtx (to_rtx);
3513
3514 RTX_UNCHANGING_P (to_rtx) = 1;
3515 }
3516
3517 /* Check the access. */
3518 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3519 {
3520 rtx to_addr;
3521 int size;
3522 int best_mode_size;
3523 enum machine_mode best_mode;
3524
3525 best_mode = get_best_mode (bitsize, bitpos,
3526 TYPE_ALIGN (TREE_TYPE (tem)),
3527 mode1, volatilep);
3528 if (best_mode == VOIDmode)
3529 best_mode = QImode;
3530
3531 best_mode_size = GET_MODE_BITSIZE (best_mode);
3532 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3533 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3534 size *= GET_MODE_SIZE (best_mode);
3535
3536 /* Check the access right of the pointer. */
3537 in_check_memory_usage = 1;
3538 if (size)
3539 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3540 to_addr, Pmode,
3541 GEN_INT (size), TYPE_MODE (sizetype),
3542 GEN_INT (MEMORY_USE_WO),
3543 TYPE_MODE (integer_type_node));
3544 in_check_memory_usage = 0;
3545 }
3546
3547 /* If this is a varying-length object, we must get the address of
3548 the source and do an explicit block move. */
3549 if (bitsize < 0)
3550 {
3551 unsigned int from_align;
3552 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3553 rtx inner_to_rtx
3554 = change_address (to_rtx, VOIDmode,
3555 plus_constant (XEXP (to_rtx, 0),
3556 bitpos / BITS_PER_UNIT));
3557
3558 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3559 MIN (alignment, from_align));
3560 free_temp_slots ();
3561 pop_temp_slots ();
3562 return to_rtx;
3563 }
3564 else
3565 {
3566 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3567 (want_value
3568 /* Spurious cast for HPUX compiler. */
3569 ? ((enum machine_mode)
3570 TYPE_MODE (TREE_TYPE (to)))
3571 : VOIDmode),
3572 unsignedp,
3573 alignment,
3574 int_size_in_bytes (TREE_TYPE (tem)),
3575 get_alias_set (to));
3576
3577 preserve_temp_slots (result);
3578 free_temp_slots ();
3579 pop_temp_slots ();
3580
3581 /* If the value is meaningful, convert RESULT to the proper mode.
3582 Otherwise, return nothing. */
3583 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3584 TYPE_MODE (TREE_TYPE (from)),
3585 result,
3586 TREE_UNSIGNED (TREE_TYPE (to)))
3587 : NULL_RTX);
3588 }
3589 }
3590
3591 /* If the rhs is a function call and its value is not an aggregate,
3592 call the function before we start to compute the lhs.
3593 This is needed for correct code for cases such as
3594 val = setjmp (buf) on machines where reference to val
3595 requires loading up part of an address in a separate insn.
3596
3597 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3598 since it might be a promoted variable where the zero- or sign- extension
3599 needs to be done. Handling this in the normal way is safe because no
3600 computation is done before the call. */
3601 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3602 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3603 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3604 && GET_CODE (DECL_RTL (to)) == REG))
3605 {
3606 rtx value;
3607
3608 push_temp_slots ();
3609 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3610 if (to_rtx == 0)
3611 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3612
3613 /* Handle calls that return values in multiple non-contiguous locations.
3614 The Irix 6 ABI has examples of this. */
3615 if (GET_CODE (to_rtx) == PARALLEL)
3616 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3617 TYPE_ALIGN (TREE_TYPE (from)));
3618 else if (GET_MODE (to_rtx) == BLKmode)
3619 emit_block_move (to_rtx, value, expr_size (from),
3620 TYPE_ALIGN (TREE_TYPE (from)));
3621 else
3622 {
3623 #ifdef POINTERS_EXTEND_UNSIGNED
3624 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3625 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3626 value = convert_memory_address (GET_MODE (to_rtx), value);
3627 #endif
3628 emit_move_insn (to_rtx, value);
3629 }
3630 preserve_temp_slots (to_rtx);
3631 free_temp_slots ();
3632 pop_temp_slots ();
3633 return want_value ? to_rtx : NULL_RTX;
3634 }
3635
3636 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3637 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3638
3639 if (to_rtx == 0)
3640 {
3641 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3642 if (GET_CODE (to_rtx) == MEM)
3643 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3644 }
3645
3646 /* Don't move directly into a return register. */
3647 if (TREE_CODE (to) == RESULT_DECL
3648 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3649 {
3650 rtx temp;
3651
3652 push_temp_slots ();
3653 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3654
3655 if (GET_CODE (to_rtx) == PARALLEL)
3656 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3657 TYPE_ALIGN (TREE_TYPE (from)));
3658 else
3659 emit_move_insn (to_rtx, temp);
3660
3661 preserve_temp_slots (to_rtx);
3662 free_temp_slots ();
3663 pop_temp_slots ();
3664 return want_value ? to_rtx : NULL_RTX;
3665 }
3666
3667 /* In case we are returning the contents of an object which overlaps
3668 the place the value is being stored, use a safe function when copying
3669 a value through a pointer into a structure value return block. */
3670 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3671 && current_function_returns_struct
3672 && !current_function_returns_pcc_struct)
3673 {
3674 rtx from_rtx, size;
3675
3676 push_temp_slots ();
3677 size = expr_size (from);
3678 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3679 EXPAND_MEMORY_USE_DONT);
3680
3681 /* Copy the rights of the bitmap. */
3682 if (current_function_check_memory_usage)
3683 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3684 XEXP (to_rtx, 0), Pmode,
3685 XEXP (from_rtx, 0), Pmode,
3686 convert_to_mode (TYPE_MODE (sizetype),
3687 size, TREE_UNSIGNED (sizetype)),
3688 TYPE_MODE (sizetype));
3689
3690 #ifdef TARGET_MEM_FUNCTIONS
3691 emit_library_call (memcpy_libfunc, 0,
3692 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3693 XEXP (from_rtx, 0), Pmode,
3694 convert_to_mode (TYPE_MODE (sizetype),
3695 size, TREE_UNSIGNED (sizetype)),
3696 TYPE_MODE (sizetype));
3697 #else
3698 emit_library_call (bcopy_libfunc, 0,
3699 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3700 XEXP (to_rtx, 0), Pmode,
3701 convert_to_mode (TYPE_MODE (integer_type_node),
3702 size, TREE_UNSIGNED (integer_type_node)),
3703 TYPE_MODE (integer_type_node));
3704 #endif
3705
3706 preserve_temp_slots (to_rtx);
3707 free_temp_slots ();
3708 pop_temp_slots ();
3709 return want_value ? to_rtx : NULL_RTX;
3710 }
3711
3712 /* Compute FROM and store the value in the rtx we got. */
3713
3714 push_temp_slots ();
3715 result = store_expr (from, to_rtx, want_value);
3716 preserve_temp_slots (result);
3717 free_temp_slots ();
3718 pop_temp_slots ();
3719 return want_value ? result : NULL_RTX;
3720 }
3721
3722 /* Generate code for computing expression EXP,
3723 and storing the value into TARGET.
3724 TARGET may contain a QUEUED rtx.
3725
3726 If WANT_VALUE is nonzero, return a copy of the value
3727 not in TARGET, so that we can be sure to use the proper
3728 value in a containing expression even if TARGET has something
3729 else stored in it. If possible, we copy the value through a pseudo
3730 and return that pseudo. Or, if the value is constant, we try to
3731 return the constant. In some cases, we return a pseudo
3732 copied *from* TARGET.
3733
3734 If the mode is BLKmode then we may return TARGET itself.
3735 It turns out that in BLKmode it doesn't cause a problem,
3736 because C has no operators that could combine two different
3737 assignments into the same BLKmode object with different values
3738 with no sequence point. Will other languages need this to
3739 be more thorough?
3740
3741 If WANT_VALUE is 0, we return NULL, to make sure
3742 to catch quickly any cases where the caller uses the value
3743 and fails to set WANT_VALUE. */
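/* For example, in a chained assignment such as `a = b = c;', the inner
   assignment is typically expanded with WANT_VALUE nonzero so that the
   value stored into `b' (usually copied through a pseudo) can be reused
   as the value stored into `a'.  */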
3744
3745 rtx
3746 store_expr (exp, target, want_value)
3747 register tree exp;
3748 register rtx target;
3749 int want_value;
3750 {
3751 register rtx temp;
3752 int dont_return_target = 0;
3753
3754 if (TREE_CODE (exp) == COMPOUND_EXPR)
3755 {
3756 /* Perform first part of compound expression, then assign from second
3757 part. */
3758 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3759 emit_queue ();
3760 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3761 }
3762 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3763 {
3764 /* For conditional expression, get safe form of the target. Then
3765 test the condition, doing the appropriate assignment on either
3766 side. This avoids the creation of unnecessary temporaries.
3767 For non-BLKmode, it is more efficient not to do this. */
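/* Roughly, for a BLKmode assignment such as `s = p ? a : b;' this emits
     if (! p) goto lab1;
     <store A into TARGET>; goto lab2;
   lab1:
     <store B into TARGET>;
   lab2:
   instead of building both arms in temporaries and then copying one of
   them into TARGET.  */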
3768
3769 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3770
3771 emit_queue ();
3772 target = protect_from_queue (target, 1);
3773
3774 do_pending_stack_adjust ();
3775 NO_DEFER_POP;
3776 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3777 start_cleanup_deferral ();
3778 store_expr (TREE_OPERAND (exp, 1), target, 0);
3779 end_cleanup_deferral ();
3780 emit_queue ();
3781 emit_jump_insn (gen_jump (lab2));
3782 emit_barrier ();
3783 emit_label (lab1);
3784 start_cleanup_deferral ();
3785 store_expr (TREE_OPERAND (exp, 2), target, 0);
3786 end_cleanup_deferral ();
3787 emit_queue ();
3788 emit_label (lab2);
3789 OK_DEFER_POP;
3790
3791 return want_value ? target : NULL_RTX;
3792 }
3793 else if (queued_subexp_p (target))
3794 /* If target contains a postincrement, let's not risk
3795 using it as the place to generate the rhs. */
3796 {
3797 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3798 {
3799 /* Expand EXP into a new pseudo. */
3800 temp = gen_reg_rtx (GET_MODE (target));
3801 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3802 }
3803 else
3804 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3805
3806 /* If target is volatile, ANSI requires accessing the value
3807 *from* the target, if it is accessed. So make that happen.
3808 In no case return the target itself. */
3809 if (! MEM_VOLATILE_P (target) && want_value)
3810 dont_return_target = 1;
3811 }
3812 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3813 && GET_MODE (target) != BLKmode)
3814 /* If target is in memory and caller wants value in a register instead,
3815 arrange that. Pass TARGET as target for expand_expr so that,
3816 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3817 We know expand_expr will not use the target in that case.
3818 Don't do this if TARGET is volatile because we are supposed
3819 to write it and then read it. */
3820 {
3821 temp = expand_expr (exp, target, GET_MODE (target), 0);
3822 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3823 temp = copy_to_reg (temp);
3824 dont_return_target = 1;
3825 }
3826 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3827 /* If this is a scalar in a register that is stored in a wider mode
3828 than the declared mode, compute the result into its declared mode
3829 and then convert to the wider mode. Our value is the computed
3830 expression. */
3831 {
3832 /* If we don't want a value, we can do the conversion inside EXP,
3833 which will often result in some optimizations. Do the conversion
3834 in two steps: first change the signedness, if needed, then
3835 the extend. But don't do this if the type of EXP is a subtype
3836 of something else since then the conversion might involve
3837 more than just converting modes. */
3838 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3839 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3840 {
3841 if (TREE_UNSIGNED (TREE_TYPE (exp))
3842 != SUBREG_PROMOTED_UNSIGNED_P (target))
3843 exp
3844 = convert
3845 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3846 TREE_TYPE (exp)),
3847 exp);
3848
3849 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3850 SUBREG_PROMOTED_UNSIGNED_P (target)),
3851 exp);
3852 }
3853
3854 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3855
3856 /* If TEMP is a volatile MEM and we want a result value, make
3857 the access now so it gets done only once. Likewise if
3858 it contains TARGET. */
3859 if (GET_CODE (temp) == MEM && want_value
3860 && (MEM_VOLATILE_P (temp)
3861 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3862 temp = copy_to_reg (temp);
3863
3864 /* If TEMP is a VOIDmode constant, use convert_modes to make
3865 sure that we properly convert it. */
3866 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3867 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3868 TYPE_MODE (TREE_TYPE (exp)), temp,
3869 SUBREG_PROMOTED_UNSIGNED_P (target));
3870
3871 convert_move (SUBREG_REG (target), temp,
3872 SUBREG_PROMOTED_UNSIGNED_P (target));
3873
3874 /* If we promoted a constant, change the mode back down to match
3875 target. Otherwise, the caller might get confused by a result whose
3876 mode is larger than expected. */
3877
3878 if (want_value && GET_MODE (temp) != GET_MODE (target)
3879 && GET_MODE (temp) != VOIDmode)
3880 {
3881 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3882 SUBREG_PROMOTED_VAR_P (temp) = 1;
3883 SUBREG_PROMOTED_UNSIGNED_P (temp)
3884 = SUBREG_PROMOTED_UNSIGNED_P (target);
3885 }
3886
3887 return want_value ? temp : NULL_RTX;
3888 }
3889 else
3890 {
3891 temp = expand_expr (exp, target, GET_MODE (target), 0);
3892 /* Return TARGET if it's a specified hardware register.
3893 If TARGET is a volatile mem ref, either return TARGET
3894 or return a reg copied *from* TARGET; ANSI requires this.
3895
3896 Otherwise, if TEMP is not TARGET, return TEMP
3897 if it is constant (for efficiency),
3898 or if we really want the correct value. */
3899 if (!(target && GET_CODE (target) == REG
3900 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3901 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3902 && ! rtx_equal_p (temp, target)
3903 && (CONSTANT_P (temp) || want_value))
3904 dont_return_target = 1;
3905 }
3906
3907 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3908 the same as that of TARGET, adjust the constant. This is needed, for
3909 example, in case it is a CONST_DOUBLE and we want only a word-sized
3910 value. */
3911 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3912 && TREE_CODE (exp) != ERROR_MARK
3913 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3914 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3915 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3916
3917 if (current_function_check_memory_usage
3918 && GET_CODE (target) == MEM
3919 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3920 {
3921 in_check_memory_usage = 1;
3922 if (GET_CODE (temp) == MEM)
3923 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3924 XEXP (target, 0), Pmode,
3925 XEXP (temp, 0), Pmode,
3926 expr_size (exp), TYPE_MODE (sizetype));
3927 else
3928 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3929 XEXP (target, 0), Pmode,
3930 expr_size (exp), TYPE_MODE (sizetype),
3931 GEN_INT (MEMORY_USE_WO),
3932 TYPE_MODE (integer_type_node));
3933 in_check_memory_usage = 0;
3934 }
3935
3936 /* If value was not generated in the target, store it there.
3937 Convert the value to TARGET's type first if necessary. */
3938 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3939 one or both of them are volatile memory refs, we have to distinguish
3940 two cases:
3941 - expand_expr has used TARGET. In this case, we must not generate
3942 another copy. This can be detected by TEMP and TARGET being equal
3943 according to == (i.e. pointer equality).
3944 - expand_expr has not used TARGET - that means that the source just
3945 happens to have the same RTX form. Since temp will have been created
3946 by expand_expr, it will compare unequal according to == .
3947 We must generate a copy in this case, to reach the correct number
3948 of volatile memory references. */
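/* For example, for `*p = *p' where P points to volatile storage,
   expand_expr may return a MEM that is rtx_equal_p to TARGET without
   being the same rtx; the side_effects_p test below then forces an
   explicit copy so that both the volatile read and the volatile write
   are emitted.  */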
3949
3950 if ((! rtx_equal_p (temp, target)
3951 || (temp != target && (side_effects_p (temp)
3952 || side_effects_p (target))))
3953 && TREE_CODE (exp) != ERROR_MARK)
3954 {
3955 target = protect_from_queue (target, 1);
3956 if (GET_MODE (temp) != GET_MODE (target)
3957 && GET_MODE (temp) != VOIDmode)
3958 {
3959 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3960 if (dont_return_target)
3961 {
3962 /* In this case, we will return TEMP,
3963 so make sure it has the proper mode.
3964 But don't forget to store the value into TARGET. */
3965 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3966 emit_move_insn (target, temp);
3967 }
3968 else
3969 convert_move (target, temp, unsignedp);
3970 }
3971
3972 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3973 {
3974 /* Handle copying a string constant into an array.
3975 The string constant may be shorter than the array.
3976 So copy just the string's actual length, and clear the rest. */
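/* For example, with the C front end, `char buf[8] = "hi";' has
   TREE_STRING_LENGTH of 3 (two characters plus the terminating null),
   so we would copy three bytes into BUF and clear the remaining five.  */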
3977 rtx size;
3978 rtx addr;
3979
3980 /* Get the size of the data type of the string,
3981 which is actually the size of the target. */
3982 size = expr_size (exp);
3983 if (GET_CODE (size) == CONST_INT
3984 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3985 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3986 else
3987 {
3988 /* Compute the size of the data to copy from the string. */
3989 tree copy_size
3990 = size_binop (MIN_EXPR,
3991 make_tree (sizetype, size),
3992 size_int (TREE_STRING_LENGTH (exp)));
3993 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
3994 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3995 VOIDmode, 0);
3996 rtx label = 0;
3997
3998 /* Copy that much. */
3999 emit_block_move (target, temp, copy_size_rtx,
4000 TYPE_ALIGN (TREE_TYPE (exp)));
4001
4002 /* Figure out how much is left in TARGET that we have to clear.
4003 Do all calculations in ptr_mode. */
4004
4005 addr = XEXP (target, 0);
4006 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4007
4008 if (GET_CODE (copy_size_rtx) == CONST_INT)
4009 {
4010 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4011 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4012 align = MIN (align, (BITS_PER_UNIT
4013 * (INTVAL (copy_size_rtx)
4014 & - INTVAL (copy_size_rtx))));
4015 }
4016 else
4017 {
4018 addr = force_reg (ptr_mode, addr);
4019 addr = expand_binop (ptr_mode, add_optab, addr,
4020 copy_size_rtx, NULL_RTX, 0,
4021 OPTAB_LIB_WIDEN);
4022
4023 size = expand_binop (ptr_mode, sub_optab, size,
4024 copy_size_rtx, NULL_RTX, 0,
4025 OPTAB_LIB_WIDEN);
4026
4027 align = BITS_PER_UNIT;
4028 label = gen_label_rtx ();
4029 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4030 GET_MODE (size), 0, 0, label);
4031 }
4032 align = MIN (align, expr_align (copy_size));
4033
4034 if (size != const0_rtx)
4035 {
4036 rtx dest = gen_rtx_MEM (BLKmode, addr);
4037
4038 MEM_COPY_ATTRIBUTES (dest, target);
4039
4040 /* Be sure we can write on ADDR. */
4041 in_check_memory_usage = 1;
4042 if (current_function_check_memory_usage)
4043 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
4044 addr, Pmode,
4045 size, TYPE_MODE (sizetype),
4046 GEN_INT (MEMORY_USE_WO),
4047 TYPE_MODE (integer_type_node));
4048 in_check_memory_usage = 0;
4049 clear_storage (dest, size, align);
4050 }
4051
4052 if (label)
4053 emit_label (label);
4054 }
4055 }
4056 /* Handle calls that return values in multiple non-contiguous locations.
4057 The Irix 6 ABI has examples of this. */
4058 else if (GET_CODE (target) == PARALLEL)
4059 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4060 TYPE_ALIGN (TREE_TYPE (exp)));
4061 else if (GET_MODE (temp) == BLKmode)
4062 emit_block_move (target, temp, expr_size (exp),
4063 TYPE_ALIGN (TREE_TYPE (exp)));
4064 else
4065 emit_move_insn (target, temp);
4066 }
4067
4068 /* If we don't want a value, return NULL_RTX. */
4069 if (! want_value)
4070 return NULL_RTX;
4071
4072 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4073 ??? The latter test doesn't seem to make sense. */
4074 else if (dont_return_target && GET_CODE (temp) != MEM)
4075 return temp;
4076
4077 /* Return TARGET itself if it is a hard register. */
4078 else if (want_value && GET_MODE (target) != BLKmode
4079 && ! (GET_CODE (target) == REG
4080 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4081 return copy_to_reg (target);
4082
4083 else
4084 return target;
4085 }
4086 \f
4087 /* Return 1 if EXP just contains zeros. */
4088
4089 static int
4090 is_zeros_p (exp)
4091 tree exp;
4092 {
4093 tree elt;
4094
4095 switch (TREE_CODE (exp))
4096 {
4097 case CONVERT_EXPR:
4098 case NOP_EXPR:
4099 case NON_LVALUE_EXPR:
4100 return is_zeros_p (TREE_OPERAND (exp, 0));
4101
4102 case INTEGER_CST:
4103 return integer_zerop (exp);
4104
4105 case COMPLEX_CST:
4106 return
4107 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4108
4109 case REAL_CST:
4110 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4111
4112 case CONSTRUCTOR:
4113 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4114 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4115 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4116 if (! is_zeros_p (TREE_VALUE (elt)))
4117 return 0;
4118
4119 return 1;
4120
4121 default:
4122 return 0;
4123 }
4124 }
4125
4126 /* Return 1 if EXP contains mostly (3/4) zeros. */
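/* For example, the constructor { 0, 0, 0, 5 } has four elements of which
   three are zero, and 4 * 3 >= 3 * 4, so it counts as mostly zeros;
   { 0, 5 } does not, since 4 * 1 < 3 * 2.  */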
4127
4128 static int
4129 mostly_zeros_p (exp)
4130 tree exp;
4131 {
4132 if (TREE_CODE (exp) == CONSTRUCTOR)
4133 {
4134 int elts = 0, zeros = 0;
4135 tree elt = CONSTRUCTOR_ELTS (exp);
4136 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4137 {
4138 /* If there are no ranges of true bits, it is all zero. */
4139 return elt == NULL_TREE;
4140 }
4141 for (; elt; elt = TREE_CHAIN (elt))
4142 {
4143 /* We do not handle the case where the index is a RANGE_EXPR,
4144 so the statistic will be somewhat inaccurate.
4145 We do make a more accurate count in store_constructor itself,
4146 so since this function is only used for nested array elements,
4147 this should be close enough. */
4148 if (mostly_zeros_p (TREE_VALUE (elt)))
4149 zeros++;
4150 elts++;
4151 }
4152
4153 return 4 * zeros >= 3 * elts;
4154 }
4155
4156 return is_zeros_p (exp);
4157 }
4158 \f
4159 /* Helper function for store_constructor.
4160 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4161 TYPE is the type of the CONSTRUCTOR, not the element type.
4162 ALIGN and CLEARED are as for store_constructor.
4163
4164 This provides a recursive shortcut back to store_constructor when it isn't
4165 necessary to go through store_field. This is so that we can pass through
4166 the cleared field to let store_constructor know that we may not have to
4167 clear a substructure if the outer structure has already been cleared. */
4168
4169 static void
4170 store_constructor_field (target, bitsize, bitpos,
4171 mode, exp, type, align, cleared)
4172 rtx target;
4173 unsigned HOST_WIDE_INT bitsize;
4174 HOST_WIDE_INT bitpos;
4175 enum machine_mode mode;
4176 tree exp, type;
4177 unsigned int align;
4178 int cleared;
4179 {
4180 if (TREE_CODE (exp) == CONSTRUCTOR
4181 && bitpos % BITS_PER_UNIT == 0
4182 /* If we have a non-zero bitpos for a register target, then we just
4183 let store_field do the bitfield handling. This is unlikely to
4184 generate unnecessary clear instructions anyway. */
4185 && (bitpos == 0 || GET_CODE (target) == MEM))
4186 {
4187 if (bitpos != 0)
4188 target
4189 = change_address (target,
4190 GET_MODE (target) == BLKmode
4191 || 0 != (bitpos
4192 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4193 ? BLKmode : VOIDmode,
4194 plus_constant (XEXP (target, 0),
4195 bitpos / BITS_PER_UNIT));
4196 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4197 }
4198 else
4199 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4200 int_size_in_bytes (type), 0);
4201 }
4202
4203 /* Store the value of constructor EXP into the rtx TARGET.
4204 TARGET is either a REG or a MEM.
4205 ALIGN is the maximum known alignment for TARGET.
4206 CLEARED is true if TARGET is known to have been zero'd.
4207 SIZE is the number of bytes of TARGET we are allowed to modify: this
4208 may not be the same as the size of EXP if we are assigning to a field
4209 which has been packed to exclude padding bits. */
4210
4211 static void
4212 store_constructor (exp, target, align, cleared, size)
4213 tree exp;
4214 rtx target;
4215 unsigned int align;
4216 int cleared;
4217 HOST_WIDE_INT size;
4218 {
4219 tree type = TREE_TYPE (exp);
4220 #ifdef WORD_REGISTER_OPERATIONS
4221 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4222 #endif
4223
4224 /* We know our target cannot conflict, since safe_from_p has been called. */
4225 #if 0
4226 /* Don't try copying piece by piece into a hard register
4227 since that is vulnerable to being clobbered by EXP.
4228 Instead, construct in a pseudo register and then copy it all. */
4229 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4230 {
4231 rtx temp = gen_reg_rtx (GET_MODE (target));
4232 store_constructor (exp, temp, align, cleared, size);
4233 emit_move_insn (target, temp);
4234 return;
4235 }
4236 #endif
4237
4238 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4239 || TREE_CODE (type) == QUAL_UNION_TYPE)
4240 {
4241 register tree elt;
4242
4243 /* Inform later passes that the whole union value is dead. */
4244 if ((TREE_CODE (type) == UNION_TYPE
4245 || TREE_CODE (type) == QUAL_UNION_TYPE)
4246 && ! cleared)
4247 {
4248 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4249
4250 /* If the constructor is empty, clear the union. */
4251 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4252 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4253 }
4254
4255 /* If we are building a static constructor into a register,
4256 set the initial value as zero so we can fold the value into
4257 a constant. But if more than one register is involved,
4258 this probably loses. */
4259 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4260 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4261 {
4262 if (! cleared)
4263 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4264
4265 cleared = 1;
4266 }
4267
4268 /* If the constructor has fewer fields than the structure
4269 or if we are initializing the structure to mostly zeros,
4270 clear the whole structure first. */
4271 else if (size > 0
4272 && ((list_length (CONSTRUCTOR_ELTS (exp))
4273 != fields_length (type))
4274 || mostly_zeros_p (exp)))
4275 {
4276 if (! cleared)
4277 clear_storage (target, GEN_INT (size), align);
4278
4279 cleared = 1;
4280 }
4281 else if (! cleared)
4282 /* Inform later passes that the old value is dead. */
4283 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4284
4285 /* Store each element of the constructor into
4286 the corresponding field of TARGET. */
4287
4288 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4289 {
4290 register tree field = TREE_PURPOSE (elt);
4291 #ifdef WORD_REGISTER_OPERATIONS
4292 tree value = TREE_VALUE (elt);
4293 #endif
4294 register enum machine_mode mode;
4295 HOST_WIDE_INT bitsize;
4296 HOST_WIDE_INT bitpos = 0;
4297 int unsignedp;
4298 tree offset;
4299 rtx to_rtx = target;
4300
4301 /* Just ignore missing fields.
4302 We cleared the whole structure, above,
4303 if any fields are missing. */
4304 if (field == 0)
4305 continue;
4306
4307 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4308 continue;
4309
4310 if (host_integerp (DECL_SIZE (field), 1))
4311 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4312 else
4313 bitsize = -1;
4314
4315 unsignedp = TREE_UNSIGNED (field);
4316 mode = DECL_MODE (field);
4317 if (DECL_BIT_FIELD (field))
4318 mode = VOIDmode;
4319
4320 offset = DECL_FIELD_OFFSET (field);
4321 if (host_integerp (offset, 0)
4322 && host_integerp (bit_position (field), 0))
4323 {
4324 bitpos = int_bit_position (field);
4325 offset = 0;
4326 }
4327 else
4328 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4329
4330 if (offset)
4331 {
4332 rtx offset_rtx;
4333
4334 if (contains_placeholder_p (offset))
4335 offset = build (WITH_RECORD_EXPR, sizetype,
4336 offset, make_tree (TREE_TYPE (exp), target));
4337
4338 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4339 if (GET_CODE (to_rtx) != MEM)
4340 abort ();
4341
4342 if (GET_MODE (offset_rtx) != ptr_mode)
4343 {
4344 #ifdef POINTERS_EXTEND_UNSIGNED
4345 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4346 #else
4347 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4348 #endif
4349 }
4350
4351 to_rtx
4352 = change_address (to_rtx, VOIDmode,
4353 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4354 force_reg (ptr_mode,
4355 offset_rtx)));
4356 align = DECL_OFFSET_ALIGN (field);
4357 }
4358
4359 if (TREE_READONLY (field))
4360 {
4361 if (GET_CODE (to_rtx) == MEM)
4362 to_rtx = copy_rtx (to_rtx);
4363
4364 RTX_UNCHANGING_P (to_rtx) = 1;
4365 }
4366
4367 #ifdef WORD_REGISTER_OPERATIONS
4368 /* If this initializes a field that is smaller than a word, at the
4369 start of a word, try to widen it to a full word.
4370 This special case allows us to output C++ member function
4371 initializations in a form that the optimizers can understand. */
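/* For example, on a 32-bit, big-endian target, initializing a 16-bit
   integer field at bit position 0 of a register-held structure with the
   constant 5 is rewritten as a word_mode store of (5 << 16), which the
   RTL optimizers handle much better than a bit-field insertion.  */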
4372 if (GET_CODE (target) == REG
4373 && bitsize < BITS_PER_WORD
4374 && bitpos % BITS_PER_WORD == 0
4375 && GET_MODE_CLASS (mode) == MODE_INT
4376 && TREE_CODE (value) == INTEGER_CST
4377 && exp_size >= 0
4378 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4379 {
4380 tree type = TREE_TYPE (value);
4381 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4382 {
4383 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4384 value = convert (type, value);
4385 }
4386 if (BYTES_BIG_ENDIAN)
4387 value
4388 = fold (build (LSHIFT_EXPR, type, value,
4389 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4390 bitsize = BITS_PER_WORD;
4391 mode = word_mode;
4392 }
4393 #endif
4394 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4395 TREE_VALUE (elt), type, align, cleared);
4396 }
4397 }
4398 else if (TREE_CODE (type) == ARRAY_TYPE)
4399 {
4400 register tree elt;
4401 register int i;
4402 int need_to_clear;
4403 tree domain = TYPE_DOMAIN (type);
4404 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4405 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4406 tree elttype = TREE_TYPE (type);
4407
4408 /* If the constructor has fewer elements than the array,
4409 clear the whole array first. Similarly if this is
4410 a static constructor of a non-BLKmode object. */
4411 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4412 need_to_clear = 1;
4413 else
4414 {
4415 HOST_WIDE_INT count = 0, zero_count = 0;
4416 need_to_clear = 0;
4417 /* This loop is a more accurate version of the loop in
4418 mostly_zeros_p (it handles RANGE_EXPR in an index).
4419 It is also needed to check for missing elements. */
4420 for (elt = CONSTRUCTOR_ELTS (exp);
4421 elt != NULL_TREE;
4422 elt = TREE_CHAIN (elt))
4423 {
4424 tree index = TREE_PURPOSE (elt);
4425 HOST_WIDE_INT this_node_count;
4426
4427 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4428 {
4429 tree lo_index = TREE_OPERAND (index, 0);
4430 tree hi_index = TREE_OPERAND (index, 1);
4431
4432 if (! host_integerp (lo_index, 1)
4433 || ! host_integerp (hi_index, 1))
4434 {
4435 need_to_clear = 1;
4436 break;
4437 }
4438
4439 this_node_count = (tree_low_cst (hi_index, 1)
4440 - tree_low_cst (lo_index, 1) + 1);
4441 }
4442 else
4443 this_node_count = 1;
4444 count += this_node_count;
4445 if (mostly_zeros_p (TREE_VALUE (elt)))
4446 zero_count += this_node_count;
4447 }
4448 /* Clear the entire array first if there are any missing elements,
4449 or if the incidence of zero elements is >= 75%. */
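/* For example, if 10 elements are present and 8 of them are themselves
   mostly zero, 4 * 8 >= 3 * 10, so the whole array is cleared first and
   only the nonzero elements are stored afterwards.  */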
4450 if (count < maxelt - minelt + 1
4451 || 4 * zero_count >= 3 * count)
4452 need_to_clear = 1;
4453 }
4454 if (need_to_clear && size > 0)
4455 {
4456 if (! cleared)
4457 clear_storage (target, GEN_INT (size), align);
4458 cleared = 1;
4459 }
4460 else
4461 /* Inform later passes that the old value is dead. */
4462 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4463
4464 /* Store each element of the constructor into
4465 the corresponding element of TARGET, determined
4466 by counting the elements. */
4467 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4468 elt;
4469 elt = TREE_CHAIN (elt), i++)
4470 {
4471 register enum machine_mode mode;
4472 HOST_WIDE_INT bitsize;
4473 HOST_WIDE_INT bitpos;
4474 int unsignedp;
4475 tree value = TREE_VALUE (elt);
4476 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4477 tree index = TREE_PURPOSE (elt);
4478 rtx xtarget = target;
4479
4480 if (cleared && is_zeros_p (value))
4481 continue;
4482
4483 unsignedp = TREE_UNSIGNED (elttype);
4484 mode = TYPE_MODE (elttype);
4485 if (mode == BLKmode)
4486 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4487 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4488 : -1);
4489 else
4490 bitsize = GET_MODE_BITSIZE (mode);
4491
4492 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4493 {
4494 tree lo_index = TREE_OPERAND (index, 0);
4495 tree hi_index = TREE_OPERAND (index, 1);
4496 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4497 struct nesting *loop;
4498 HOST_WIDE_INT lo, hi, count;
4499 tree position;
4500
4501 /* If the range is constant and "small", unroll the loop. */
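/* "Small" here means the target is not in memory, or the range covers at
   most two elements, or the elements have a constant size and the whole
   range occupies at most 40 bytes (40 * 8 bits).  */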
4502 if (host_integerp (lo_index, 0)
4503 && host_integerp (hi_index, 0)
4504 && (lo = tree_low_cst (lo_index, 0),
4505 hi = tree_low_cst (hi_index, 0),
4506 count = hi - lo + 1,
4507 (GET_CODE (target) != MEM
4508 || count <= 2
4509 || (host_integerp (TYPE_SIZE (elttype), 1)
4510 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4511 <= 40 * 8)))))
4512 {
4513 lo -= minelt; hi -= minelt;
4514 for (; lo <= hi; lo++)
4515 {
4516 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4517 store_constructor_field (target, bitsize, bitpos, mode,
4518 value, type, align, cleared);
4519 }
4520 }
4521 else
4522 {
4523 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4524 loop_top = gen_label_rtx ();
4525 loop_end = gen_label_rtx ();
4526
4527 unsignedp = TREE_UNSIGNED (domain);
4528
4529 index = build_decl (VAR_DECL, NULL_TREE, domain);
4530
4531 DECL_RTL (index) = index_r
4532 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4533 &unsignedp, 0));
4534
4535 if (TREE_CODE (value) == SAVE_EXPR
4536 && SAVE_EXPR_RTL (value) == 0)
4537 {
4538 /* Make sure value gets expanded once before the
4539 loop. */
4540 expand_expr (value, const0_rtx, VOIDmode, 0);
4541 emit_queue ();
4542 }
4543 store_expr (lo_index, index_r, 0);
4544 loop = expand_start_loop (0);
4545
4546 /* Assign value to element index. */
4547 position
4548 = convert (ssizetype,
4549 fold (build (MINUS_EXPR, TREE_TYPE (index),
4550 index, TYPE_MIN_VALUE (domain))));
4551 position = size_binop (MULT_EXPR, position,
4552 convert (ssizetype,
4553 TYPE_SIZE_UNIT (elttype)));
4554
4555 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4556 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4557 xtarget = change_address (target, mode, addr);
4558 if (TREE_CODE (value) == CONSTRUCTOR)
4559 store_constructor (value, xtarget, align, cleared,
4560 bitsize / BITS_PER_UNIT);
4561 else
4562 store_expr (value, xtarget, 0);
4563
4564 expand_exit_loop_if_false (loop,
4565 build (LT_EXPR, integer_type_node,
4566 index, hi_index));
4567
4568 expand_increment (build (PREINCREMENT_EXPR,
4569 TREE_TYPE (index),
4570 index, integer_one_node), 0, 0);
4571 expand_end_loop ();
4572 emit_label (loop_end);
4573 }
4574 }
4575 else if ((index != 0 && ! host_integerp (index, 0))
4576 || ! host_integerp (TYPE_SIZE (elttype), 1))
4577 {
4578 rtx pos_rtx, addr;
4579 tree position;
4580
4581 if (index == 0)
4582 index = ssize_int (i);
4583
4584 if (minelt)
4585 index = convert (ssizetype,
4586 fold (build (MINUS_EXPR, index,
4587 TYPE_MIN_VALUE (domain))));
4588
4589 position = size_binop (MULT_EXPR, index,
4590 convert (ssizetype,
4591 TYPE_SIZE_UNIT (elttype)));
4592 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4593 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4594 xtarget = change_address (target, mode, addr);
4595 store_expr (value, xtarget, 0);
4596 }
4597 else
4598 {
4599 if (index != 0)
4600 bitpos = ((tree_low_cst (index, 0) - minelt)
4601 * tree_low_cst (TYPE_SIZE (elttype), 1));
4602 else
4603 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4604
4605 store_constructor_field (target, bitsize, bitpos, mode, value,
4606 type, align, cleared);
4607 }
4608 }
4609 }
4610
4611 /* Set constructor assignments. */
4612 else if (TREE_CODE (type) == SET_TYPE)
4613 {
4614 tree elt = CONSTRUCTOR_ELTS (exp);
4615 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4616 tree domain = TYPE_DOMAIN (type);
4617 tree domain_min, domain_max, bitlength;
4618
4619 /* The default implementation strategy is to extract the constant
4620 parts of the constructor, use that to initialize the target,
4621 and then "or" in whatever non-constant ranges we need in addition.
4622
4623 If a large set is all zero or all ones, it is
4624 probably better to set it using memset (if available) or bzero.
4625 Also, if a large set has just a single range, it may also be
4626 better to first clear the whole set (using bzero/memset), and
4627 then set the bits we want. */
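/* For example (with !BYTES_BIG_ENDIAN and a 32-bit set word), a constant
   set over the domain 0..31 containing the members 3, 4 and 5 is packed
   into the single word 0x38 below and stored with one move insn.  */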
4628
4629 /* Check for all zeros. */
4630 if (elt == NULL_TREE && size > 0)
4631 {
4632 if (!cleared)
4633 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4634 return;
4635 }
4636
4637 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4638 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4639 bitlength = size_binop (PLUS_EXPR,
4640 size_diffop (domain_max, domain_min),
4641 ssize_int (1));
4642
4643 nbits = tree_low_cst (bitlength, 1);
4644
4645 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4646 are "complicated" (more than one range), initialize (the
4647 constant parts) by copying from a constant. */
4648 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4649 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4650 {
4651 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4652 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4653 char *bit_buffer = (char *) alloca (nbits);
4654 HOST_WIDE_INT word = 0;
4655 unsigned int bit_pos = 0;
4656 unsigned int ibit = 0;
4657 unsigned int offset = 0; /* In bytes from beginning of set. */
4658
4659 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4660 for (;;)
4661 {
4662 if (bit_buffer[ibit])
4663 {
4664 if (BYTES_BIG_ENDIAN)
4665 word |= (1 << (set_word_size - 1 - bit_pos));
4666 else
4667 word |= 1 << bit_pos;
4668 }
4669
4670 bit_pos++; ibit++;
4671 if (bit_pos >= set_word_size || ibit == nbits)
4672 {
4673 if (word != 0 || ! cleared)
4674 {
4675 rtx datum = GEN_INT (word);
4676 rtx to_rtx;
4677
4678 /* The assumption here is that it is safe to use
4679 XEXP if the set is multi-word, but not if
4680 it's single-word. */
4681 if (GET_CODE (target) == MEM)
4682 {
4683 to_rtx = plus_constant (XEXP (target, 0), offset);
4684 to_rtx = change_address (target, mode, to_rtx);
4685 }
4686 else if (offset == 0)
4687 to_rtx = target;
4688 else
4689 abort ();
4690 emit_move_insn (to_rtx, datum);
4691 }
4692
4693 if (ibit == nbits)
4694 break;
4695 word = 0;
4696 bit_pos = 0;
4697 offset += set_word_size / BITS_PER_UNIT;
4698 }
4699 }
4700 }
4701 else if (!cleared)
4702 /* Don't bother clearing storage if the set is all ones. */
4703 if (TREE_CHAIN (elt) != NULL_TREE
4704 || (TREE_PURPOSE (elt) == NULL_TREE
4705 ? nbits != 1
4706 : ( ! host_integerp (TREE_VALUE (elt), 0)
4707 || ! host_integerp (TREE_PURPOSE (elt), 0)
4708 || (tree_low_cst (TREE_VALUE (elt), 0)
4709 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4710 != (HOST_WIDE_INT) nbits))))
4711 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4712
4713 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4714 {
4715 /* Start of range of element or NULL. */
4716 tree startbit = TREE_PURPOSE (elt);
4717 /* End of range of element, or element value. */
4718 tree endbit = TREE_VALUE (elt);
4719 #ifdef TARGET_MEM_FUNCTIONS
4720 HOST_WIDE_INT startb, endb;
4721 #endif
4722 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4723
4724 bitlength_rtx = expand_expr (bitlength,
4725 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4726
4727 /* Handle non-range tuple element like [ expr ]. */
4728 if (startbit == NULL_TREE)
4729 {
4730 startbit = save_expr (endbit);
4731 endbit = startbit;
4732 }
4733
4734 startbit = convert (sizetype, startbit);
4735 endbit = convert (sizetype, endbit);
4736 if (! integer_zerop (domain_min))
4737 {
4738 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4739 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4740 }
4741 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4742 EXPAND_CONST_ADDRESS);
4743 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4744 EXPAND_CONST_ADDRESS);
4745
4746 if (REG_P (target))
4747 {
4748 targetx = assign_stack_temp (GET_MODE (target),
4749 GET_MODE_SIZE (GET_MODE (target)),
4750 0);
4751 emit_move_insn (targetx, target);
4752 }
4753
4754 else if (GET_CODE (target) == MEM)
4755 targetx = target;
4756 else
4757 abort ();
4758
4759 #ifdef TARGET_MEM_FUNCTIONS
4760 /* Optimization: If startbit and endbit are
4761 constants divisible by BITS_PER_UNIT,
4762 call memset instead. */
4763 if (TREE_CODE (startbit) == INTEGER_CST
4764 && TREE_CODE (endbit) == INTEGER_CST
4765 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4766 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4767 {
4768 emit_library_call (memset_libfunc, 0,
4769 VOIDmode, 3,
4770 plus_constant (XEXP (targetx, 0),
4771 startb / BITS_PER_UNIT),
4772 Pmode,
4773 constm1_rtx, TYPE_MODE (integer_type_node),
4774 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4775 TYPE_MODE (sizetype));
4776 }
4777 else
4778 #endif
4779 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4780 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4781 bitlength_rtx, TYPE_MODE (sizetype),
4782 startbit_rtx, TYPE_MODE (sizetype),
4783 endbit_rtx, TYPE_MODE (sizetype));
4784
4785 if (REG_P (target))
4786 emit_move_insn (target, targetx);
4787 }
4788 }
4789
4790 else
4791 abort ();
4792 }
4793
4794 /* Store the value of EXP (an expression tree)
4795 into a subfield of TARGET which has mode MODE and occupies
4796 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4797 If MODE is VOIDmode, it means that we are storing into a bit-field.
4798
4799 If VALUE_MODE is VOIDmode, return nothing in particular.
4800 UNSIGNEDP is not used in this case.
4801
4802 Otherwise, return an rtx for the value stored. This rtx
4803 has mode VALUE_MODE if that is convenient to do.
4804 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4805
4806 ALIGN is the alignment that TARGET is known to have.
4807 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4808
4809 ALIAS_SET is the alias set for the destination. This value will
4810 (in general) be different from that for TARGET, since TARGET is a
4811 reference to the containing structure. */
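/* For example, storing into a 3-bit bit-field that starts 5 bits into a
   byte-aligned structure in memory reaches the store_bit_field case below
   with BITSIZE == 3, BITPOS == 5 and MODE == VOIDmode.  */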
4812
4813 static rtx
4814 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4815 unsignedp, align, total_size, alias_set)
4816 rtx target;
4817 HOST_WIDE_INT bitsize;
4818 HOST_WIDE_INT bitpos;
4819 enum machine_mode mode;
4820 tree exp;
4821 enum machine_mode value_mode;
4822 int unsignedp;
4823 unsigned int align;
4824 HOST_WIDE_INT total_size;
4825 int alias_set;
4826 {
4827 HOST_WIDE_INT width_mask = 0;
4828
4829 if (TREE_CODE (exp) == ERROR_MARK)
4830 return const0_rtx;
4831
4832 if (bitsize < HOST_BITS_PER_WIDE_INT)
4833 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4834
4835 /* If we are storing into an unaligned field of an aligned union that is
4836 in a register, we may have the mode of TARGET being an integer mode but
4837 MODE == BLKmode. In that case, get an aligned object whose size and
4838 alignment are the same as TARGET and store TARGET into it (we can avoid
4839 the store if the field being stored is the entire width of TARGET). Then
4840 call ourselves recursively to store the field into a BLKmode version of
4841 that object. Finally, load from the object into TARGET. This is not
4842 very efficient in general, but should only be slightly more expensive
4843 than the otherwise-required unaligned accesses. Perhaps this can be
4844 cleaned up later. */
4845
4846 if (mode == BLKmode
4847 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4848 {
4849 rtx object = assign_stack_temp (GET_MODE (target),
4850 GET_MODE_SIZE (GET_MODE (target)), 0);
4851 rtx blk_object = copy_rtx (object);
4852
4853 MEM_SET_IN_STRUCT_P (object, 1);
4854 MEM_SET_IN_STRUCT_P (blk_object, 1);
4855 PUT_MODE (blk_object, BLKmode);
4856
4857 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4858 emit_move_insn (object, target);
4859
4860 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4861 align, total_size, alias_set);
4862
4863 /* Even though we aren't returning target, we need to
4864 give it the updated value. */
4865 emit_move_insn (target, object);
4866
4867 return blk_object;
4868 }
4869
4870 if (GET_CODE (target) == CONCAT)
4871 {
4872 /* We're storing into a struct containing a single __complex. */
4873
4874 if (bitpos != 0)
4875 abort ();
4876 return store_expr (exp, target, 0);
4877 }
4878
4879 /* If the structure is in a register or if the component
4880 is a bit field, we cannot use addressing to access it.
4881 Use bit-field techniques or SUBREG to store in it. */
4882
4883 if (mode == VOIDmode
4884 || (mode != BLKmode && ! direct_store[(int) mode]
4885 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4886 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4887 || GET_CODE (target) == REG
4888 || GET_CODE (target) == SUBREG
4889 /* If the field isn't aligned enough to store as an ordinary memref,
4890 store it as a bit field. */
4891 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4892 && (align < GET_MODE_ALIGNMENT (mode)
4893 || bitpos % GET_MODE_ALIGNMENT (mode)))
4894 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4895 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4896 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4897 /* If the RHS and field are a constant size and the size of the
4898 RHS isn't the same size as the bitfield, we must use bitfield
4899 operations. */
4900 || (bitsize >= 0
4901 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4902 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4903 {
4904 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4905
4906 /* If BITSIZE is narrower than the size of the type of EXP
4907 we will be narrowing TEMP. Normally, what's wanted are the
4908 low-order bits. However, if EXP's type is a record and this is a
4909 big-endian machine, we want the upper BITSIZE bits. */
4910 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4911 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4912 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4913 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4914 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4915 - bitsize),
4916 temp, 1);
4917
4918 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4919 MODE. */
4920 if (mode != VOIDmode && mode != BLKmode
4921 && mode != TYPE_MODE (TREE_TYPE (exp)))
4922 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4923
4924 /* If the modes of TARGET and TEMP are both BLKmode, both
4925 must be in memory and BITPOS must be aligned on a byte
4926 boundary. If so, we simply do a block copy. */
4927 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4928 {
4929 unsigned int exp_align = expr_align (exp);
4930
4931 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4932 || bitpos % BITS_PER_UNIT != 0)
4933 abort ();
4934
4935 target = change_address (target, VOIDmode,
4936 plus_constant (XEXP (target, 0),
4937 bitpos / BITS_PER_UNIT));
4938
4939 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4940 align = MIN (exp_align, align);
4941
4942 /* Find an alignment that is consistent with the bit position. */
4943 while ((bitpos % align) != 0)
4944 align >>= 1;
4945
4946 emit_block_move (target, temp,
4947 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4948 / BITS_PER_UNIT),
4949 align);
4950
4951 return value_mode == VOIDmode ? const0_rtx : target;
4952 }
4953
4954 /* Store the value in the bitfield. */
4955 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4956 if (value_mode != VOIDmode)
4957 {
4958 /* The caller wants an rtx for the value. */
4959 /* If possible, avoid refetching from the bitfield itself. */
4960 if (width_mask != 0
4961 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4962 {
4963 tree count;
4964 enum machine_mode tmode;
4965
4966 if (unsignedp)
4967 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4968 tmode = GET_MODE (temp);
4969 if (tmode == VOIDmode)
4970 tmode = value_mode;
4971 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4972 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4973 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4974 }
4975 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4976 NULL_RTX, value_mode, 0, align,
4977 total_size);
4978 }
4979 return const0_rtx;
4980 }
4981 else
4982 {
4983 rtx addr = XEXP (target, 0);
4984 rtx to_rtx;
4985
4986 /* If a value is wanted, it must be the lhs;
4987 so make the address stable for multiple use. */
4988
4989 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4990 && ! CONSTANT_ADDRESS_P (addr)
4991 /* A frame-pointer reference is already stable. */
4992 && ! (GET_CODE (addr) == PLUS
4993 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4994 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4995 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4996 addr = copy_to_reg (addr);
4997
4998 /* Now build a reference to just the desired component. */
4999
5000 to_rtx = copy_rtx (change_address (target, mode,
5001 plus_constant (addr,
5002 (bitpos
5003 / BITS_PER_UNIT))));
5004 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5005 MEM_ALIAS_SET (to_rtx) = alias_set;
5006
5007 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5008 }
5009 }
5010 \f
5011 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5012 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5013 ARRAY_REFs and find the ultimate containing object, which we return.
5014
5015 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5016 bit position, and *PUNSIGNEDP to the signedness of the field.
5017 If the position of the field is variable, we store a tree
5018 giving the variable offset (in units) in *POFFSET.
5019 This offset is in addition to the bit position.
5020 If the position is not variable, we store 0 in *POFFSET.
5021 We set *PALIGNMENT to the alignment of the address that will be
5022 computed. This is the alignment of the thing we return if *POFFSET
5023 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5024
5025 If any of the extraction expressions is volatile,
5026 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5027
5028 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5029 is a mode that can be used to access the field. In that case, *PBITSIZE
5030 is redundant.
5031
5032 If the field describes a variable-sized object, *PMODE is set to
5033 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5034 this case, but the address of the object can be found. */
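/* For example, for a COMPONENT_REF `x.f' where F is a 3-bit bit-field
   placed 37 bits from the start of X, this returns the tree for X and sets
   *PBITSIZE = 3, *PBITPOS = 37, *POFFSET = 0 and *PMODE = VOIDmode; for an
   ordinary `int' field *PMODE would instead be the field's mode (e.g.
   SImode on most targets).  */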
5035
5036 tree
5037 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5038 punsignedp, pvolatilep, palignment)
5039 tree exp;
5040 HOST_WIDE_INT *pbitsize;
5041 HOST_WIDE_INT *pbitpos;
5042 tree *poffset;
5043 enum machine_mode *pmode;
5044 int *punsignedp;
5045 int *pvolatilep;
5046 unsigned int *palignment;
5047 {
5048 tree size_tree = 0;
5049 enum machine_mode mode = VOIDmode;
5050 tree offset = size_zero_node;
5051 tree bit_offset = bitsize_zero_node;
5052 unsigned int alignment = BIGGEST_ALIGNMENT;
5053 tree tem;
5054
5055 /* First get the mode, signedness, and size. We do this from just the
5056 outermost expression. */
5057 if (TREE_CODE (exp) == COMPONENT_REF)
5058 {
5059 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5060 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5061 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5062
5063 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5064 }
5065 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5066 {
5067 size_tree = TREE_OPERAND (exp, 1);
5068 *punsignedp = TREE_UNSIGNED (exp);
5069 }
5070 else
5071 {
5072 mode = TYPE_MODE (TREE_TYPE (exp));
5073 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5074
5075 if (mode == BLKmode)
5076 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5077 else
5078 *pbitsize = GET_MODE_BITSIZE (mode);
5079 }
5080
5081 if (size_tree != 0)
5082 {
5083 if (! host_integerp (size_tree, 1))
5084 mode = BLKmode, *pbitsize = -1;
5085 else
5086 *pbitsize = tree_low_cst (size_tree, 1);
5087 }
5088
5089 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5090 and find the ultimate containing object. */
5091 while (1)
5092 {
5093 if (TREE_CODE (exp) == BIT_FIELD_REF)
5094 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5095 else if (TREE_CODE (exp) == COMPONENT_REF)
5096 {
5097 tree field = TREE_OPERAND (exp, 1);
5098 tree this_offset = DECL_FIELD_OFFSET (field);
5099
5100 /* If this field hasn't been filled in yet, don't go
5101 past it. This should only happen when folding expressions
5102 made during type construction. */
5103 if (this_offset == 0)
5104 break;
5105 else if (! TREE_CONSTANT (this_offset)
5106 && contains_placeholder_p (this_offset))
5107 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5108
5109 offset = size_binop (PLUS_EXPR, offset, this_offset);
5110 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5111 DECL_FIELD_BIT_OFFSET (field));
5112
5113 if (! host_integerp (offset, 0))
5114 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5115 }
5116
5117 else if (TREE_CODE (exp) == ARRAY_REF)
5118 {
5119 tree index = TREE_OPERAND (exp, 1);
5120 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5121 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5122 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5123
5124 /* We assume all arrays have sizes that are a multiple of a byte.
5125 First subtract the lower bound, if any, in the type of the
5126 index, then convert to sizetype and multiply by the size of the
5127 array element. */
5128 if (low_bound != 0 && ! integer_zerop (low_bound))
5129 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5130 index, low_bound));
5131
5132 /* If the index has a self-referential type, pass it to a
5133 WITH_RECORD_EXPR; if the component size is self-referential,
5134 pass our component to one. */
5135 if (! TREE_CONSTANT (index)
5136 && contains_placeholder_p (index))
5137 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5138 if (! TREE_CONSTANT (unit_size)
5139 && contains_placeholder_p (unit_size))
5140 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5141 TREE_OPERAND (exp, 0));
5142
5143 offset = size_binop (PLUS_EXPR, offset,
5144 size_binop (MULT_EXPR,
5145 convert (sizetype, index),
5146 unit_size));
5147 }
5148
5149 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5150 && ! ((TREE_CODE (exp) == NOP_EXPR
5151 || TREE_CODE (exp) == CONVERT_EXPR)
5152 && (TYPE_MODE (TREE_TYPE (exp))
5153 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5154 break;
5155
5156 /* If any reference in the chain is volatile, the effect is volatile. */
5157 if (TREE_THIS_VOLATILE (exp))
5158 *pvolatilep = 1;
5159
5160 /* If the offset is non-constant already, then we can't assume any
5161 alignment more than the alignment here. */
5162 if (! TREE_CONSTANT (offset))
5163 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5164
5165 exp = TREE_OPERAND (exp, 0);
5166 }
5167
5168 if (DECL_P (exp))
5169 alignment = MIN (alignment, DECL_ALIGN (exp));
5170 else if (TREE_TYPE (exp) != 0)
5171 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5172
5173 /* If OFFSET is constant, see if we can return the whole thing as a
5174 constant bit position. Otherwise, split it up. */
5175 if (host_integerp (offset, 0)
5176 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5177 bitsize_unit_node))
5178 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5179 && host_integerp (tem, 0))
5180 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5181 else
5182 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5183
5184 *pmode = mode;
5185 *palignment = alignment;
5186 return exp;
5187 }
5188
5189 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5190
5191 static enum memory_use_mode
5192 get_memory_usage_from_modifier (modifier)
5193 enum expand_modifier modifier;
5194 {
5195 switch (modifier)
5196 {
5197 case EXPAND_NORMAL:
5198 case EXPAND_SUM:
5199 return MEMORY_USE_RO;
5200 break;
5201 case EXPAND_MEMORY_USE_WO:
5202 return MEMORY_USE_WO;
5203 break;
5204 case EXPAND_MEMORY_USE_RW:
5205 return MEMORY_USE_RW;
5206 break;
5207 case EXPAND_MEMORY_USE_DONT:
5208 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5209 MEMORY_USE_DONT, because they are modifiers to a call of
5210 expand_expr in the ADDR_EXPR case of expand_expr. */
5211 case EXPAND_CONST_ADDRESS:
5212 case EXPAND_INITIALIZER:
5213 return MEMORY_USE_DONT;
5214 case EXPAND_MEMORY_USE_BAD:
5215 default:
5216 abort ();
5217 }
5218 }
5219 \f
5220 /* Given an rtx VALUE that may contain additions and multiplications,
5221 return an equivalent value that just refers to a register or memory.
5222 This is done by generating instructions to perform the arithmetic
5223 and returning a pseudo-register containing the value.
5224
5225 The returned value may be a REG, SUBREG, MEM or constant. */
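/* For example, given VALUE = (plus:SI (reg:SI 60) (mult:SI (reg:SI 61)
   (const_int 4))), this emits insns for the multiplication and the
   addition and returns a register (normally a pseudo) holding the sum.  */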
5226
5227 rtx
5228 force_operand (value, target)
5229 rtx value, target;
5230 {
5231 register optab binoptab = 0;
5232 /* Use a temporary to force order of execution of calls to
5233 `force_operand'. */
5234 rtx tmp;
5235 register rtx op2;
5236 /* Use subtarget as the target for operand 0 of a binary operation. */
5237 register rtx subtarget = get_subtarget (target);
5238
5239 /* Check for a PIC address load. */
5240 if (flag_pic
5241 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5242 && XEXP (value, 0) == pic_offset_table_rtx
5243 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5244 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5245 || GET_CODE (XEXP (value, 1)) == CONST))
5246 {
5247 if (!subtarget)
5248 subtarget = gen_reg_rtx (GET_MODE (value));
5249 emit_move_insn (subtarget, value);
5250 return subtarget;
5251 }
5252
5253 if (GET_CODE (value) == PLUS)
5254 binoptab = add_optab;
5255 else if (GET_CODE (value) == MINUS)
5256 binoptab = sub_optab;
5257 else if (GET_CODE (value) == MULT)
5258 {
5259 op2 = XEXP (value, 1);
5260 if (!CONSTANT_P (op2)
5261 && !(GET_CODE (op2) == REG && op2 != subtarget))
5262 subtarget = 0;
5263 tmp = force_operand (XEXP (value, 0), subtarget);
5264 return expand_mult (GET_MODE (value), tmp,
5265 force_operand (op2, NULL_RTX),
5266 target, 0);
5267 }
5268
5269 if (binoptab)
5270 {
5271 op2 = XEXP (value, 1);
5272 if (!CONSTANT_P (op2)
5273 && !(GET_CODE (op2) == REG && op2 != subtarget))
5274 subtarget = 0;
5275 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5276 {
5277 binoptab = add_optab;
5278 op2 = negate_rtx (GET_MODE (value), op2);
5279 }
5280
5281 /* Check for an addition with OP2 a constant integer and our first
5282 operand a PLUS of a virtual register and something else. In that
5283 case, we want to emit the sum of the virtual register and the
5284 constant first and then add the other value. This allows virtual
5285 register instantiation to simply modify the constant rather than
5286 creating another one around this addition. */
5287 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5288 && GET_CODE (XEXP (value, 0)) == PLUS
5289 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5290 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5291 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5292 {
5293 rtx temp = expand_binop (GET_MODE (value), binoptab,
5294 XEXP (XEXP (value, 0), 0), op2,
5295 subtarget, 0, OPTAB_LIB_WIDEN);
5296 return expand_binop (GET_MODE (value), binoptab, temp,
5297 force_operand (XEXP (XEXP (value, 0), 1), 0),
5298 target, 0, OPTAB_LIB_WIDEN);
5299 }
5300
5301 tmp = force_operand (XEXP (value, 0), subtarget);
5302 return expand_binop (GET_MODE (value), binoptab, tmp,
5303 force_operand (op2, NULL_RTX),
5304 target, 0, OPTAB_LIB_WIDEN);
5305 /* We give UNSIGNEDP = 0 to expand_binop
5306 because the only operations we are expanding here are signed ones. */
5307 }
5308 return value;
5309 }
5310 \f
5311 /* Subroutine of expand_expr:
5312 save the non-copied parts (LIST) of an expr (LHS), and return a list
5313 which can restore these values to their previous values,
5314 should something modify their storage. */
5315
5316 static tree
5317 save_noncopied_parts (lhs, list)
5318 tree lhs;
5319 tree list;
5320 {
5321 tree tail;
5322 tree parts = 0;
5323
5324 for (tail = list; tail; tail = TREE_CHAIN (tail))
5325 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5326 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5327 else
5328 {
5329 tree part = TREE_VALUE (tail);
5330 tree part_type = TREE_TYPE (part);
5331 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5332 rtx target = assign_temp (part_type, 0, 1, 1);
5333 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5334 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5335 parts = tree_cons (to_be_saved,
5336 build (RTL_EXPR, part_type, NULL_TREE,
5337 (tree) target),
5338 parts);
5339 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5340 }
5341 return parts;
5342 }
5343
5344 /* Subroutine of expand_expr:
5345 record the non-copied parts (LIST) of an expr (LHS), and return a list
5346 which specifies the initial values of these parts. */
5347
5348 static tree
5349 init_noncopied_parts (lhs, list)
5350 tree lhs;
5351 tree list;
5352 {
5353 tree tail;
5354 tree parts = 0;
5355
5356 for (tail = list; tail; tail = TREE_CHAIN (tail))
5357 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5358 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5359 else if (TREE_PURPOSE (tail))
5360 {
5361 tree part = TREE_VALUE (tail);
5362 tree part_type = TREE_TYPE (part);
5363 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5364 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5365 }
5366 return parts;
5367 }
5368
5369 /* Subroutine of expand_expr: return nonzero iff there is no way that
5370 EXP can reference X, which is being modified. TOP_P is nonzero if this
5371 call is going to be used to determine whether we need a temporary
5372 for EXP, as opposed to a recursive call to this function.
5373
5374 It is always safe for this routine to return zero since it merely
5375 searches for optimization opportunities. */
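/* A typical use looks like the CONSTRUCTOR handling further down in this
   file (sketched here; MODE stands for whatever mode the caller wants):

       if (target == 0 || ! safe_from_p (target, exp, 1))
         target = gen_reg_rtx (mode);

   i.e. fall back to a fresh pseudo whenever expanding EXP might still read
   the storage that TARGET names.  */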
5376
5377 static int
5378 safe_from_p (x, exp, top_p)
5379 rtx x;
5380 tree exp;
5381 int top_p;
5382 {
5383 rtx exp_rtl = 0;
5384 int i, nops;
5385 static int save_expr_count;
5386 static int save_expr_size = 0;
5387 static tree *save_expr_rewritten;
5388 static tree save_expr_trees[256];
5389
5390 if (x == 0
5391 /* If EXP has varying size, we MUST use a target since we currently
5392 have no way of allocating temporaries of variable size
5393 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5394 So we assume here that something at a higher level has prevented a
5395 clash. This is somewhat bogus, but the best we can do. Only
5396 do this when X is BLKmode and when we are at the top level. */
5397 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5398 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5399 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5400 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5401 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5402 != INTEGER_CST)
5403 && GET_MODE (x) == BLKmode))
5404 return 1;
5405
5406 if (top_p && save_expr_size == 0)
5407 {
5408 int rtn;
5409
5410 save_expr_count = 0;
5411 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5412 save_expr_rewritten = &save_expr_trees[0];
5413
5414 rtn = safe_from_p (x, exp, 1);
5415
5416 for (i = 0; i < save_expr_count; ++i)
5417 {
5418 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5419 abort ();
5420 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5421 }
5422
5423 save_expr_size = 0;
5424
5425 return rtn;
5426 }
5427
5428 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5429 find the underlying pseudo. */
5430 if (GET_CODE (x) == SUBREG)
5431 {
5432 x = SUBREG_REG (x);
5433 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5434 return 0;
5435 }
5436
5437 /* If X is a location in the outgoing argument area, it is always safe. */
5438 if (GET_CODE (x) == MEM
5439 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5440 || (GET_CODE (XEXP (x, 0)) == PLUS
5441 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5442 return 1;
5443
5444 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5445 {
5446 case 'd':
5447 exp_rtl = DECL_RTL (exp);
5448 break;
5449
5450 case 'c':
5451 return 1;
5452
5453 case 'x':
5454 if (TREE_CODE (exp) == TREE_LIST)
5455 return ((TREE_VALUE (exp) == 0
5456 || safe_from_p (x, TREE_VALUE (exp), 0))
5457 && (TREE_CHAIN (exp) == 0
5458 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5459 else if (TREE_CODE (exp) == ERROR_MARK)
5460 return 1; /* An already-visited SAVE_EXPR? */
5461 else
5462 return 0;
5463
5464 case '1':
5465 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5466
5467 case '2':
5468 case '<':
5469 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5470 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5471
5472 case 'e':
5473 case 'r':
5474 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5475 the expression. If it is set, we conflict iff we are that rtx or
5476 both are in memory. Otherwise, we check all operands of the
5477 expression recursively. */
5478
5479 switch (TREE_CODE (exp))
5480 {
5481 case ADDR_EXPR:
5482 return (staticp (TREE_OPERAND (exp, 0))
5483 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5484 || TREE_STATIC (exp));
5485
5486 case INDIRECT_REF:
5487 if (GET_CODE (x) == MEM)
5488 return 0;
5489 break;
5490
5491 case CALL_EXPR:
5492 exp_rtl = CALL_EXPR_RTL (exp);
5493 if (exp_rtl == 0)
5494 {
5495 /* Assume that the call will clobber all hard registers and
5496 all of memory. */
5497 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5498 || GET_CODE (x) == MEM)
5499 return 0;
5500 }
5501
5502 break;
5503
5504 case RTL_EXPR:
5505 /* If a sequence exists, we would have to scan every instruction
5506 in the sequence to see if it was safe. This is probably not
5507 worthwhile. */
5508 if (RTL_EXPR_SEQUENCE (exp))
5509 return 0;
5510
5511 exp_rtl = RTL_EXPR_RTL (exp);
5512 break;
5513
5514 case WITH_CLEANUP_EXPR:
5515 exp_rtl = RTL_EXPR_RTL (exp);
5516 break;
5517
5518 case CLEANUP_POINT_EXPR:
5519 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5520
5521 case SAVE_EXPR:
5522 exp_rtl = SAVE_EXPR_RTL (exp);
5523 if (exp_rtl)
5524 break;
5525
5526 /* This SAVE_EXPR might appear many times in the top-level
5527 safe_from_p() expression, and if it has a complex
5528 subexpression, examining it multiple times could result
5529 in a combinatorial explosion. E.g. on an Alpha
5530 running at least 200MHz, a Fortran test case compiled with
5531 optimization took about 28 minutes to compile -- even though
5532 it was only a few lines long, and the complicated line causing
5533 so much time to be spent in the earlier version of safe_from_p()
5534 had only 293 or so unique nodes.
5535
5536 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5537 where it is so we can turn it back in the top-level safe_from_p()
5538 when we're done. */
5539
5540 /* For now, don't bother re-sizing the array. */
5541 if (save_expr_count >= save_expr_size)
5542 return 0;
5543 save_expr_rewritten[save_expr_count++] = exp;
5544
5545 nops = TREE_CODE_LENGTH (SAVE_EXPR);
5546 for (i = 0; i < nops; i++)
5547 {
5548 tree operand = TREE_OPERAND (exp, i);
5549 if (operand == NULL_TREE)
5550 continue;
5551 TREE_SET_CODE (exp, ERROR_MARK);
5552 if (!safe_from_p (x, operand, 0))
5553 return 0;
5554 TREE_SET_CODE (exp, SAVE_EXPR);
5555 }
5556 TREE_SET_CODE (exp, ERROR_MARK);
5557 return 1;
5558
5559 case BIND_EXPR:
5560 /* The only operand we look at is operand 1. The rest aren't
5561 part of the expression. */
5562 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5563
5564 case METHOD_CALL_EXPR:
5565 /* This takes an rtx argument, but shouldn't appear here. */
5566 abort ();
5567
5568 default:
5569 break;
5570 }
5571
5572 /* If we have an rtx, we do not need to scan our operands. */
5573 if (exp_rtl)
5574 break;
5575
5576 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5577 for (i = 0; i < nops; i++)
5578 if (TREE_OPERAND (exp, i) != 0
5579 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5580 return 0;
5581 }
5582
5583 /* If we have an rtl, find any enclosed object. Then see if we conflict
5584 with it. */
5585 if (exp_rtl)
5586 {
5587 if (GET_CODE (exp_rtl) == SUBREG)
5588 {
5589 exp_rtl = SUBREG_REG (exp_rtl);
5590 if (GET_CODE (exp_rtl) == REG
5591 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5592 return 0;
5593 }
5594
5595 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5596 are memory and EXP is not readonly. */
5597 return ! (rtx_equal_p (x, exp_rtl)
5598 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5599 && ! TREE_READONLY (exp)));
5600 }
5601
5602 /* If we reach here, it is safe. */
5603 return 1;
5604 }
5605
5606 /* Subroutine of expand_expr: return nonzero iff EXP is an
5607 expression whose type is statically determinable. */
5608
5609 static int
5610 fixed_type_p (exp)
5611 tree exp;
5612 {
5613 if (TREE_CODE (exp) == PARM_DECL
5614 || TREE_CODE (exp) == VAR_DECL
5615 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5616 || TREE_CODE (exp) == COMPONENT_REF
5617 || TREE_CODE (exp) == ARRAY_REF)
5618 return 1;
5619 return 0;
5620 }
5621
5622 /* Subroutine of expand_expr: return rtx if EXP is a
5623 variable or parameter; else return 0. */
5624
5625 static rtx
5626 var_rtx (exp)
5627 tree exp;
5628 {
5629 STRIP_NOPS (exp);
5630 switch (TREE_CODE (exp))
5631 {
5632 case PARM_DECL:
5633 case VAR_DECL:
5634 return DECL_RTL (exp);
5635 default:
5636 return 0;
5637 }
5638 }
5639
5640 #ifdef MAX_INTEGER_COMPUTATION_MODE
5641 void
5642 check_max_integer_computation_mode (exp)
5643 tree exp;
5644 {
5645 enum tree_code code;
5646 enum machine_mode mode;
5647
5648 /* Strip any NOPs that don't change the mode. */
5649 STRIP_NOPS (exp);
5650 code = TREE_CODE (exp);
5651
5652 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5653 if (code == NOP_EXPR
5654 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5655 return;
5656
5657 /* First check the type of the overall operation. We need only look at
5658 unary, binary and relational operations. */
5659 if (TREE_CODE_CLASS (code) == '1'
5660 || TREE_CODE_CLASS (code) == '2'
5661 || TREE_CODE_CLASS (code) == '<')
5662 {
5663 mode = TYPE_MODE (TREE_TYPE (exp));
5664 if (GET_MODE_CLASS (mode) == MODE_INT
5665 && mode > MAX_INTEGER_COMPUTATION_MODE)
5666 fatal ("unsupported wide integer operation");
5667 }
5668
5669 /* Check operand of a unary op. */
5670 if (TREE_CODE_CLASS (code) == '1')
5671 {
5672 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5673 if (GET_MODE_CLASS (mode) == MODE_INT
5674 && mode > MAX_INTEGER_COMPUTATION_MODE)
5675 fatal ("unsupported wide integer operation");
5676 }
5677
5678 /* Check operands of a binary/comparison op. */
5679 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5680 {
5681 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5682 if (GET_MODE_CLASS (mode) == MODE_INT
5683 && mode > MAX_INTEGER_COMPUTATION_MODE)
5684 fatal ("unsupported wide integer operation");
5685
5686 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5687 if (GET_MODE_CLASS (mode) == MODE_INT
5688 && mode > MAX_INTEGER_COMPUTATION_MODE)
5689 fatal ("unsupported wide integer operation");
5690 }
5691 }
5692 #endif
5693 \f
5694 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5695 has any readonly fields. If any of the fields have types that
5696 contain readonly fields, return true as well. */
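/* For example (illustrative declarations only), both of these record types
   make readonly_fields_p return 1:

       struct a { const int x; int y; };
       struct b { struct a inner; int z; };    (readonly field one level down)  */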
5697
5698 static int
5699 readonly_fields_p (type)
5700 tree type;
5701 {
5702 tree field;
5703
5704 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5705 if (TREE_CODE (field) == FIELD_DECL
5706 && (TREE_READONLY (field)
5707 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5708 && readonly_fields_p (TREE_TYPE (field)))))
5709 return 1;
5710
5711 return 0;
5712 }
5713 \f
5714 /* expand_expr: generate code for computing expression EXP.
5715 An rtx for the computed value is returned. The value is never null.
5716 In the case of a void EXP, const0_rtx is returned.
5717
5718 The value may be stored in TARGET if TARGET is nonzero.
5719 TARGET is just a suggestion; callers must assume that
5720 the rtx returned may not be the same as TARGET.
5721
5722 If TARGET is CONST0_RTX, it means that the value will be ignored.
5723
5724 If TMODE is not VOIDmode, it suggests generating the
5725 result in mode TMODE. But this is done only when convenient.
5726 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5727 TMODE is just a suggestion; callers must assume that
5728 the rtx returned may not have mode TMODE.
5729
5730 Note that TARGET may have neither TMODE nor MODE. In that case, it
5731 probably will not be used.
5732
5733 If MODIFIER is EXPAND_SUM then when EXP is an addition
5734 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5735 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5736 products as above, or REG or MEM, or constant.
5737 Ordinarily in such cases we would output mul or add instructions
5738 and then return a pseudo reg containing the sum.
5739
5740 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5741 it also marks a label as absolutely required (it can't be dead).
5742 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5743 This is used for outputting expressions used in initializers.
5744
5745 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5746 with a constant address even if that address is not normally legitimate.
5747 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
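/* Sketch of the two common calling styles (EXP stands for whatever tree the
   caller built):

       op0 = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   computes the value into some rtx now, whereas

       op0 = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_SUM);

   is used for address arithmetic and must be prepared to get back a nest of
   PLUS/MULT terms rather than a register.  */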
5748
5749 rtx
5750 expand_expr (exp, target, tmode, modifier)
5751 register tree exp;
5752 rtx target;
5753 enum machine_mode tmode;
5754 enum expand_modifier modifier;
5755 {
5756 register rtx op0, op1, temp;
5757 tree type = TREE_TYPE (exp);
5758 int unsignedp = TREE_UNSIGNED (type);
5759 register enum machine_mode mode;
5760 register enum tree_code code = TREE_CODE (exp);
5761 optab this_optab;
5762 rtx subtarget, original_target;
5763 int ignore;
5764 tree context;
5765 /* Used by check-memory-usage to make modifier read only. */
5766 enum expand_modifier ro_modifier;
5767
5768 /* Handle ERROR_MARK before anybody tries to access its type. */
5769 if (TREE_CODE (exp) == ERROR_MARK)
5770 {
5771 op0 = CONST0_RTX (tmode);
5772 if (op0 != 0)
5773 return op0;
5774 return const0_rtx;
5775 }
5776
5777 mode = TYPE_MODE (type);
5778 /* Use subtarget as the target for operand 0 of a binary operation. */
5779 subtarget = get_subtarget (target);
5780 original_target = target;
5781 ignore = (target == const0_rtx
5782 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5783 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5784 || code == COND_EXPR)
5785 && TREE_CODE (type) == VOID_TYPE));
5786
5787 /* Make a read-only version of the modifier. */
5788 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5789 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5790 ro_modifier = modifier;
5791 else
5792 ro_modifier = EXPAND_NORMAL;
5793
5794 /* If we are going to ignore this result, we need only do something
5795 if there is a side-effect somewhere in the expression. If there
5796 is, short-circuit the most common cases here. Note that we must
5797 not call expand_expr with anything but const0_rtx in case this
5798 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5799
5800 if (ignore)
5801 {
5802 if (! TREE_SIDE_EFFECTS (exp))
5803 return const0_rtx;
5804
5805 /* Ensure we reference a volatile object even if value is ignored, but
5806 don't do this if all we are doing is taking its address. */
5807 if (TREE_THIS_VOLATILE (exp)
5808 && TREE_CODE (exp) != FUNCTION_DECL
5809 && mode != VOIDmode && mode != BLKmode
5810 && modifier != EXPAND_CONST_ADDRESS)
5811 {
5812 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5813 if (GET_CODE (temp) == MEM)
5814 temp = copy_to_reg (temp);
5815 return const0_rtx;
5816 }
5817
5818 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5819 || code == INDIRECT_REF || code == BUFFER_REF)
5820 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5821 VOIDmode, ro_modifier);
5822 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5823 || code == ARRAY_REF)
5824 {
5825 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5826 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5827 return const0_rtx;
5828 }
5829 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5830 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5831 /* If the second operand has no side effects, just evaluate
5832 the first. */
5833 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5834 VOIDmode, ro_modifier);
5835 else if (code == BIT_FIELD_REF)
5836 {
5837 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5838 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5839 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5840 return const0_rtx;
5841 }
5842 ;
5843 target = 0;
5844 }
5845
5846 #ifdef MAX_INTEGER_COMPUTATION_MODE
5847 /* Only check stuff here if the mode we want is different from the mode
5848 of the expression; if it's the same, check_max_integer_computation_mode
5849 will handle it. Do we really need to check this stuff at all? */
5850
5851 if (target
5852 && GET_MODE (target) != mode
5853 && TREE_CODE (exp) != INTEGER_CST
5854 && TREE_CODE (exp) != PARM_DECL
5855 && TREE_CODE (exp) != ARRAY_REF
5856 && TREE_CODE (exp) != COMPONENT_REF
5857 && TREE_CODE (exp) != BIT_FIELD_REF
5858 && TREE_CODE (exp) != INDIRECT_REF
5859 && TREE_CODE (exp) != CALL_EXPR
5860 && TREE_CODE (exp) != VAR_DECL
5861 && TREE_CODE (exp) != RTL_EXPR)
5862 {
5863 enum machine_mode mode = GET_MODE (target);
5864
5865 if (GET_MODE_CLASS (mode) == MODE_INT
5866 && mode > MAX_INTEGER_COMPUTATION_MODE)
5867 fatal ("unsupported wide integer operation");
5868 }
5869
5870 if (tmode != mode
5871 && TREE_CODE (exp) != INTEGER_CST
5872 && TREE_CODE (exp) != PARM_DECL
5873 && TREE_CODE (exp) != ARRAY_REF
5874 && TREE_CODE (exp) != COMPONENT_REF
5875 && TREE_CODE (exp) != BIT_FIELD_REF
5876 && TREE_CODE (exp) != INDIRECT_REF
5877 && TREE_CODE (exp) != VAR_DECL
5878 && TREE_CODE (exp) != CALL_EXPR
5879 && TREE_CODE (exp) != RTL_EXPR
5880 && GET_MODE_CLASS (tmode) == MODE_INT
5881 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5882 fatal ("unsupported wide integer operation");
5883
5884 check_max_integer_computation_mode (exp);
5885 #endif
5886
5887 /* If we will do cse, generate all results into pseudo registers
5888 since 1) that allows cse to find more things
5889 and 2) otherwise cse could produce an insn the machine
5890 cannot support. */
5891
5892 if (! cse_not_expected && mode != BLKmode && target
5893 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5894 target = subtarget;
5895
5896 switch (code)
5897 {
5898 case LABEL_DECL:
5899 {
5900 tree function = decl_function_context (exp);
5901 /* Handle using a label in a containing function. */
5902 if (function != current_function_decl
5903 && function != inline_function_decl && function != 0)
5904 {
5905 struct function *p = find_function_data (function);
5906 /* Allocate in the memory associated with the function
5907 that the label is in. */
5908 push_obstacks (p->function_obstack,
5909 p->function_maybepermanent_obstack);
5910
5911 p->expr->x_forced_labels
5912 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5913 p->expr->x_forced_labels);
5914 pop_obstacks ();
5915 }
5916 else
5917 {
5918 if (modifier == EXPAND_INITIALIZER)
5919 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5920 label_rtx (exp),
5921 forced_labels);
5922 }
5923
5924 temp = gen_rtx_MEM (FUNCTION_MODE,
5925 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5926 if (function != current_function_decl
5927 && function != inline_function_decl && function != 0)
5928 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5929 return temp;
5930 }
5931
5932 case PARM_DECL:
5933 if (DECL_RTL (exp) == 0)
5934 {
5935 error_with_decl (exp, "prior parameter's size depends on `%s'");
5936 return CONST0_RTX (mode);
5937 }
5938
5939 /* ... fall through ... */
5940
5941 case VAR_DECL:
5942 /* If a static var's type was incomplete when the decl was written,
5943 but the type is complete now, lay out the decl now. */
5944 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5945 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5946 {
5947 push_obstacks_nochange ();
5948 end_temporary_allocation ();
5949 layout_decl (exp, 0);
5950 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5951 pop_obstacks ();
5952 }
5953
5954 /* Although static-storage variables start off initialized, according to
5955 ANSI C, a memcpy could overwrite them with uninitialized values. So
5956 we check them too. This also lets us check for read-only variables
5957 accessed via a non-const declaration, in case it won't be detected
5958 any other way (e.g., in an embedded system or OS kernel without
5959 memory protection).
5960
5961 Aggregates are not checked here; they're handled elsewhere. */
5962 if (cfun && current_function_check_memory_usage
5963 && code == VAR_DECL
5964 && GET_CODE (DECL_RTL (exp)) == MEM
5965 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5966 {
5967 enum memory_use_mode memory_usage;
5968 memory_usage = get_memory_usage_from_modifier (modifier);
5969
5970 in_check_memory_usage = 1;
5971 if (memory_usage != MEMORY_USE_DONT)
5972 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5973 XEXP (DECL_RTL (exp), 0), Pmode,
5974 GEN_INT (int_size_in_bytes (type)),
5975 TYPE_MODE (sizetype),
5976 GEN_INT (memory_usage),
5977 TYPE_MODE (integer_type_node));
5978 in_check_memory_usage = 0;
5979 }
5980
5981 /* ... fall through ... */
5982
5983 case FUNCTION_DECL:
5984 case RESULT_DECL:
5985 if (DECL_RTL (exp) == 0)
5986 abort ();
5987
5988 /* Ensure the variable is marked as used even if it doesn't go through
5989 a parser. If it hasn't been used yet, write out an external
5990 definition. */
5991 if (! TREE_USED (exp))
5992 {
5993 assemble_external (exp);
5994 TREE_USED (exp) = 1;
5995 }
5996
5997 /* Show we haven't gotten RTL for this yet. */
5998 temp = 0;
5999
6000 /* Handle variables inherited from containing functions. */
6001 context = decl_function_context (exp);
6002
6003 /* We treat inline_function_decl as an alias for the current function
6004 because that is the inline function whose vars, types, etc.
6005 are being merged into the current function.
6006 See expand_inline_function. */
6007
6008 if (context != 0 && context != current_function_decl
6009 && context != inline_function_decl
6010 /* If var is static, we don't need a static chain to access it. */
6011 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6012 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6013 {
6014 rtx addr;
6015
6016 /* Mark as non-local and addressable. */
6017 DECL_NONLOCAL (exp) = 1;
6018 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6019 abort ();
6020 mark_addressable (exp);
6021 if (GET_CODE (DECL_RTL (exp)) != MEM)
6022 abort ();
6023 addr = XEXP (DECL_RTL (exp), 0);
6024 if (GET_CODE (addr) == MEM)
6025 addr = change_address (addr, Pmode,
6026 fix_lexical_addr (XEXP (addr, 0), exp));
6027 else
6028 addr = fix_lexical_addr (addr, exp);
6029
6030 temp = change_address (DECL_RTL (exp), mode, addr);
6031 }
6032
6033 /* This is the case of an array whose size is to be determined
6034 from its initializer, while the initializer is still being parsed.
6035 See expand_decl. */
6036
6037 else if (GET_CODE (DECL_RTL (exp)) == MEM
6038 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6039 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6040 XEXP (DECL_RTL (exp), 0));
6041
6042 /* If DECL_RTL is memory (the normal case) and either the address
6043 is not valid, or it is not a register and -fforce-addr is
6044 specified, get the address into a register. */
6045
6046 else if (GET_CODE (DECL_RTL (exp)) == MEM
6047 && modifier != EXPAND_CONST_ADDRESS
6048 && modifier != EXPAND_SUM
6049 && modifier != EXPAND_INITIALIZER
6050 && (! memory_address_p (DECL_MODE (exp),
6051 XEXP (DECL_RTL (exp), 0))
6052 || (flag_force_addr
6053 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6054 temp = change_address (DECL_RTL (exp), VOIDmode,
6055 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6056
6057 /* If we got something, return it. But first, set the alignment
6058 if the address is a register. */
6059 if (temp != 0)
6060 {
6061 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6062 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6063
6064 return temp;
6065 }
6066
6067 /* If the mode of DECL_RTL does not match that of the decl, it
6068 must be a promoted value. We return a SUBREG of the wanted mode,
6069 but mark it so that we know that it was already extended. */
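/* Concretely (register number invented): for a `short' variable that the
   target's PROMOTE_MODE widened to SImode, DECL_RTL is (reg:SI 42) while
   the decl's mode is HImode, so the code below hands back

       (subreg:HI (reg:SI 42) 0)

   with SUBREG_PROMOTED_VAR_P set, telling users of the value that the
   upper bits already hold a valid extension.  */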
6070
6071 if (GET_CODE (DECL_RTL (exp)) == REG
6072 && GET_MODE (DECL_RTL (exp)) != mode)
6073 {
6074 /* Get the signedness used for this variable. Ensure we get the
6075 same mode we got when the variable was declared. */
6076 if (GET_MODE (DECL_RTL (exp))
6077 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6078 abort ();
6079
6080 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6081 SUBREG_PROMOTED_VAR_P (temp) = 1;
6082 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6083 return temp;
6084 }
6085
6086 return DECL_RTL (exp);
6087
6088 case INTEGER_CST:
6089 return immed_double_const (TREE_INT_CST_LOW (exp),
6090 TREE_INT_CST_HIGH (exp), mode);
6091
6092 case CONST_DECL:
6093 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6094 EXPAND_MEMORY_USE_BAD);
6095
6096 case REAL_CST:
6097 /* If optimized, generate immediate CONST_DOUBLE
6098 which will be turned into memory by reload if necessary.
6099
6100 We used to force a register so that loop.c could see it. But
6101 this does not allow gen_* patterns to perform optimizations with
6102 the constants. It also produces two insns in cases like "x = 1.0;".
6103 On most machines, floating-point constants are not permitted in
6104 many insns, so we'd end up copying it to a register in any case.
6105
6106 Now, we do the copying in expand_binop, if appropriate. */
6107 return immed_real_const (exp);
6108
6109 case COMPLEX_CST:
6110 case STRING_CST:
6111 if (! TREE_CST_RTL (exp))
6112 output_constant_def (exp);
6113
6114 /* TREE_CST_RTL probably contains a constant address.
6115 On RISC machines where a constant address isn't valid,
6116 make some insns to get that address into a register. */
6117 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6118 && modifier != EXPAND_CONST_ADDRESS
6119 && modifier != EXPAND_INITIALIZER
6120 && modifier != EXPAND_SUM
6121 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6122 || (flag_force_addr
6123 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6124 return change_address (TREE_CST_RTL (exp), VOIDmode,
6125 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6126 return TREE_CST_RTL (exp);
6127
6128 case EXPR_WITH_FILE_LOCATION:
6129 {
6130 rtx to_return;
6131 const char *saved_input_filename = input_filename;
6132 int saved_lineno = lineno;
6133 input_filename = EXPR_WFL_FILENAME (exp);
6134 lineno = EXPR_WFL_LINENO (exp);
6135 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6136 emit_line_note (input_filename, lineno);
6137 /* Possibly avoid switching back and forth here. */
6138 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6139 input_filename = saved_input_filename;
6140 lineno = saved_lineno;
6141 return to_return;
6142 }
6143
6144 case SAVE_EXPR:
6145 context = decl_function_context (exp);
6146
6147 /* If this SAVE_EXPR was at global context, assume we are an
6148 initialization function and move it into our context. */
6149 if (context == 0)
6150 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6151
6152 /* We treat inline_function_decl as an alias for the current function
6153 because that is the inline function whose vars, types, etc.
6154 are being merged into the current function.
6155 See expand_inline_function. */
6156 if (context == current_function_decl || context == inline_function_decl)
6157 context = 0;
6158
6159 /* If this is non-local, handle it. */
6160 if (context)
6161 {
6162 /* The following call just exists to abort if the context is
6163 not of a containing function. */
6164 find_function_data (context);
6165
6166 temp = SAVE_EXPR_RTL (exp);
6167 if (temp && GET_CODE (temp) == REG)
6168 {
6169 put_var_into_stack (exp);
6170 temp = SAVE_EXPR_RTL (exp);
6171 }
6172 if (temp == 0 || GET_CODE (temp) != MEM)
6173 abort ();
6174 return change_address (temp, mode,
6175 fix_lexical_addr (XEXP (temp, 0), exp));
6176 }
6177 if (SAVE_EXPR_RTL (exp) == 0)
6178 {
6179 if (mode == VOIDmode)
6180 temp = const0_rtx;
6181 else
6182 temp = assign_temp (type, 3, 0, 0);
6183
6184 SAVE_EXPR_RTL (exp) = temp;
6185 if (!optimize && GET_CODE (temp) == REG)
6186 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6187 save_expr_regs);
6188
6189 /* If the mode of TEMP does not match that of the expression, it
6190 must be a promoted value. We pass store_expr a SUBREG of the
6191 wanted mode but mark it so that we know that it was already
6192 extended. Note that `unsignedp' was modified above in
6193 this case. */
6194
6195 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6196 {
6197 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6198 SUBREG_PROMOTED_VAR_P (temp) = 1;
6199 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6200 }
6201
6202 if (temp == const0_rtx)
6203 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6204 EXPAND_MEMORY_USE_BAD);
6205 else
6206 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6207
6208 TREE_USED (exp) = 1;
6209 }
6210
6211 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6212 must be a promoted value. We return a SUBREG of the wanted mode,
6213 but mark it so that we know that it was already extended. */
6214
6215 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6216 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6217 {
6218 /* Compute the signedness and make the proper SUBREG. */
6219 promote_mode (type, mode, &unsignedp, 0);
6220 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6221 SUBREG_PROMOTED_VAR_P (temp) = 1;
6222 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6223 return temp;
6224 }
6225
6226 return SAVE_EXPR_RTL (exp);
6227
6228 case UNSAVE_EXPR:
6229 {
6230 rtx temp;
6231 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6232 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6233 return temp;
6234 }
6235
6236 case PLACEHOLDER_EXPR:
6237 {
6238 tree placeholder_expr;
6239
6240 /* If there is an object on the head of the placeholder list,
6241 see if some object in it is of type TYPE or a pointer to it. For
6242 further information, see tree.def. */
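/* A hedged example: an Ada-style record whose array component is bounded by
   another component of the same record gets a size tree that contains a
   PLACEHOLDER_EXPR of the record type.  Such a tree is only expanded inside
   a WITH_RECORD_EXPR, which is what pushes the object to substitute onto
   placeholder_list before control reaches this point.  */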
6243 for (placeholder_expr = placeholder_list;
6244 placeholder_expr != 0;
6245 placeholder_expr = TREE_CHAIN (placeholder_expr))
6246 {
6247 tree need_type = TYPE_MAIN_VARIANT (type);
6248 tree object = 0;
6249 tree old_list = placeholder_list;
6250 tree elt;
6251
6252 /* Find the outermost reference that is of the type we want.
6253 If none, see if any object has a type that is a pointer to
6254 the type we want. */
6255 for (elt = TREE_PURPOSE (placeholder_expr);
6256 elt != 0 && object == 0;
6257 elt
6258 = ((TREE_CODE (elt) == COMPOUND_EXPR
6259 || TREE_CODE (elt) == COND_EXPR)
6260 ? TREE_OPERAND (elt, 1)
6261 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6262 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6263 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6264 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6265 ? TREE_OPERAND (elt, 0) : 0))
6266 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6267 object = elt;
6268
6269 for (elt = TREE_PURPOSE (placeholder_expr);
6270 elt != 0 && object == 0;
6271 elt
6272 = ((TREE_CODE (elt) == COMPOUND_EXPR
6273 || TREE_CODE (elt) == COND_EXPR)
6274 ? TREE_OPERAND (elt, 1)
6275 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6276 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6277 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6278 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6279 ? TREE_OPERAND (elt, 0) : 0))
6280 if (POINTER_TYPE_P (TREE_TYPE (elt))
6281 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6282 == need_type))
6283 object = build1 (INDIRECT_REF, need_type, elt);
6284
6285 if (object != 0)
6286 {
6287 /* Expand this object skipping the list entries before
6288 it was found in case it is also a PLACEHOLDER_EXPR.
6289 In that case, we want to translate it using subsequent
6290 entries. */
6291 placeholder_list = TREE_CHAIN (placeholder_expr);
6292 temp = expand_expr (object, original_target, tmode,
6293 ro_modifier);
6294 placeholder_list = old_list;
6295 return temp;
6296 }
6297 }
6298 }
6299
6300 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6301 abort ();
6302
6303 case WITH_RECORD_EXPR:
6304 /* Put the object on the placeholder list, expand our first operand,
6305 and pop the list. */
6306 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6307 placeholder_list);
6308 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6309 tmode, ro_modifier);
6310 placeholder_list = TREE_CHAIN (placeholder_list);
6311 return target;
6312
6313 case GOTO_EXPR:
6314 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6315 expand_goto (TREE_OPERAND (exp, 0));
6316 else
6317 expand_computed_goto (TREE_OPERAND (exp, 0));
6318 return const0_rtx;
6319
6320 case EXIT_EXPR:
6321 expand_exit_loop_if_false (NULL_PTR,
6322 invert_truthvalue (TREE_OPERAND (exp, 0)));
6323 return const0_rtx;
6324
6325 case LABELED_BLOCK_EXPR:
6326 if (LABELED_BLOCK_BODY (exp))
6327 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6328 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6329 return const0_rtx;
6330
6331 case EXIT_BLOCK_EXPR:
6332 if (EXIT_BLOCK_RETURN (exp))
6333 sorry ("returned value in block_exit_expr");
6334 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6335 return const0_rtx;
6336
6337 case LOOP_EXPR:
6338 push_temp_slots ();
6339 expand_start_loop (1);
6340 expand_expr_stmt (TREE_OPERAND (exp, 0));
6341 expand_end_loop ();
6342 pop_temp_slots ();
6343
6344 return const0_rtx;
6345
6346 case BIND_EXPR:
6347 {
6348 tree vars = TREE_OPERAND (exp, 0);
6349 int vars_need_expansion = 0;
6350
6351 /* Need to open a binding contour here because
6352 if there are any cleanups they must be contained here. */
6353 expand_start_bindings (2);
6354
6355 /* Mark the corresponding BLOCK for output in its proper place. */
6356 if (TREE_OPERAND (exp, 2) != 0
6357 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6358 insert_block (TREE_OPERAND (exp, 2));
6359
6360 /* If VARS have not yet been expanded, expand them now. */
6361 while (vars)
6362 {
6363 if (DECL_RTL (vars) == 0)
6364 {
6365 vars_need_expansion = 1;
6366 expand_decl (vars);
6367 }
6368 expand_decl_init (vars);
6369 vars = TREE_CHAIN (vars);
6370 }
6371
6372 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6373
6374 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6375
6376 return temp;
6377 }
6378
6379 case RTL_EXPR:
6380 if (RTL_EXPR_SEQUENCE (exp))
6381 {
6382 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6383 abort ();
6384 emit_insns (RTL_EXPR_SEQUENCE (exp));
6385 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6386 }
6387 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6388 free_temps_for_rtl_expr (exp);
6389 return RTL_EXPR_RTL (exp);
6390
6391 case CONSTRUCTOR:
6392 /* If we don't need the result, just ensure we evaluate any
6393 subexpressions. */
6394 if (ignore)
6395 {
6396 tree elt;
6397 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6398 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6399 EXPAND_MEMORY_USE_BAD);
6400 return const0_rtx;
6401 }
6402
6403 /* All elts simple constants => refer to a constant in memory. But
6404 if this is a non-BLKmode mode, let it store a field at a time
6405 since that should make a CONST_INT or CONST_DOUBLE when we
6406 fold. Likewise, if we have a target we can use, it is best to
6407 store directly into the target unless the type is large enough
6408 that memcpy will be used. If we are making an initializer and
6409 all operands are constant, put it in memory as well. */
6410 else if ((TREE_STATIC (exp)
6411 && ((mode == BLKmode
6412 && ! (target != 0 && safe_from_p (target, exp, 1)))
6413 || TREE_ADDRESSABLE (exp)
6414 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6415 && (! MOVE_BY_PIECES_P
6416 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6417 TYPE_ALIGN (type)))
6418 && ! mostly_zeros_p (exp))))
6419 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6420 {
6421 rtx constructor = output_constant_def (exp);
6422
6423 if (modifier != EXPAND_CONST_ADDRESS
6424 && modifier != EXPAND_INITIALIZER
6425 && modifier != EXPAND_SUM
6426 && (! memory_address_p (GET_MODE (constructor),
6427 XEXP (constructor, 0))
6428 || (flag_force_addr
6429 && GET_CODE (XEXP (constructor, 0)) != REG)))
6430 constructor = change_address (constructor, VOIDmode,
6431 XEXP (constructor, 0));
6432 return constructor;
6433 }
6434
6435 else
6436 {
6437 /* Handle calls that pass values in multiple non-contiguous
6438 locations. The Irix 6 ABI has examples of this. */
6439 if (target == 0 || ! safe_from_p (target, exp, 1)
6440 || GET_CODE (target) == PARALLEL)
6441 {
6442 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6443 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6444 else
6445 target = assign_temp (type, 0, 1, 1);
6446 }
6447
6448 if (TREE_READONLY (exp))
6449 {
6450 if (GET_CODE (target) == MEM)
6451 target = copy_rtx (target);
6452
6453 RTX_UNCHANGING_P (target) = 1;
6454 }
6455
6456 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6457 int_size_in_bytes (TREE_TYPE (exp)));
6458 return target;
6459 }
6460
6461 case INDIRECT_REF:
6462 {
6463 tree exp1 = TREE_OPERAND (exp, 0);
6464 tree index;
6465 tree string = string_constant (exp1, &index);
6466
6467 /* Try to optimize reads from const strings. */
6468 if (string
6469 && TREE_CODE (string) == STRING_CST
6470 && TREE_CODE (index) == INTEGER_CST
6471 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6472 && GET_MODE_CLASS (mode) == MODE_INT
6473 && GET_MODE_SIZE (mode) == 1
6474 && modifier != EXPAND_MEMORY_USE_WO)
6475 return
6476 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6477
6478 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6479 op0 = memory_address (mode, op0);
6480
6481 if (cfun && current_function_check_memory_usage
6482 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6483 {
6484 enum memory_use_mode memory_usage;
6485 memory_usage = get_memory_usage_from_modifier (modifier);
6486
6487 if (memory_usage != MEMORY_USE_DONT)
6488 {
6489 in_check_memory_usage = 1;
6490 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6491 op0, Pmode,
6492 GEN_INT (int_size_in_bytes (type)),
6493 TYPE_MODE (sizetype),
6494 GEN_INT (memory_usage),
6495 TYPE_MODE (integer_type_node));
6496 in_check_memory_usage = 0;
6497 }
6498 }
6499
6500 temp = gen_rtx_MEM (mode, op0);
6501 set_mem_attributes (temp, exp, 0);
6502
6503 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6504 here, because, in C and C++, the fact that a location is accessed
6505 through a pointer to const does not mean that the value there can
6506 never change. Languages where it can never change should
6507 also set TREE_STATIC. */
6508 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6509
6510 /* If we are writing to this object and its type is a record with
6511 readonly fields, we must mark it as readonly so it will
6512 conflict with readonly references to those fields. */
6513 if (modifier == EXPAND_MEMORY_USE_WO
6514 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6515 RTX_UNCHANGING_P (temp) = 1;
6516
6517 return temp;
6518 }
6519
6520 case ARRAY_REF:
6521 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6522 abort ();
6523
6524 {
6525 tree array = TREE_OPERAND (exp, 0);
6526 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6527 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6528 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6529 HOST_WIDE_INT i;
6530
6531 /* Optimize the special-case of a zero lower bound.
6532
6533 We convert the low_bound to sizetype to avoid some problems
6534 with constant folding. (E.g. suppose the lower bound is 1,
6535 and its mode is QI. Without the conversion, (ARRAY
6536 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6537 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6538
6539 if (! integer_zerop (low_bound))
6540 index = size_diffop (index, convert (sizetype, low_bound));
6541
6542 /* Fold an expression like: "foo"[2].
6543 This is not done in fold so it won't happen inside &.
6544 Don't fold if this is for wide characters since it's too
6545 difficult to do correctly and this is a very rare case. */
6546
6547 if (TREE_CODE (array) == STRING_CST
6548 && TREE_CODE (index) == INTEGER_CST
6549 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6550 && GET_MODE_CLASS (mode) == MODE_INT
6551 && GET_MODE_SIZE (mode) == 1)
6552 return
6553 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6554
6555 /* If this is a constant index into a constant array,
6556 just get the value from the array. Handle both the cases when
6557 we have an explicit constructor and when our operand is a variable
6558 that was declared const. */
6559
6560 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6561 && TREE_CODE (index) == INTEGER_CST
6562 && 0 > compare_tree_int (index,
6563 list_length (CONSTRUCTOR_ELTS
6564 (TREE_OPERAND (exp, 0)))))
6565 {
6566 tree elem;
6567
6568 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6569 i = TREE_INT_CST_LOW (index);
6570 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6571 ;
6572
6573 if (elem)
6574 return expand_expr (fold (TREE_VALUE (elem)), target,
6575 tmode, ro_modifier);
6576 }
6577
6578 else if (optimize >= 1
6579 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6580 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6581 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6582 {
6583 if (TREE_CODE (index) == INTEGER_CST)
6584 {
6585 tree init = DECL_INITIAL (array);
6586
6587 if (TREE_CODE (init) == CONSTRUCTOR)
6588 {
6589 tree elem;
6590
6591 for (elem = CONSTRUCTOR_ELTS (init);
6592 (elem
6593 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6594 elem = TREE_CHAIN (elem))
6595 ;
6596
6597 if (elem)
6598 return expand_expr (fold (TREE_VALUE (elem)), target,
6599 tmode, ro_modifier);
6600 }
6601 else if (TREE_CODE (init) == STRING_CST
6602 && 0 > compare_tree_int (index,
6603 TREE_STRING_LENGTH (init)))
6604 return (GEN_INT
6605 (TREE_STRING_POINTER
6606 (init)[TREE_INT_CST_LOW (index)]));
6607 }
6608 }
6609 }
6610 /* Fall through. */
6611
6612 case COMPONENT_REF:
6613 case BIT_FIELD_REF:
6614 /* If the operand is a CONSTRUCTOR, we can just extract the
6615 appropriate field if it is present. Don't do this if we have
6616 already written the data since we want to refer to that copy
6617 and varasm.c assumes that's what we'll do. */
6618 if (code != ARRAY_REF
6619 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6620 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6621 {
6622 tree elt;
6623
6624 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6625 elt = TREE_CHAIN (elt))
6626 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6627 /* We can normally use the value of the field in the
6628 CONSTRUCTOR. However, if this is a bitfield in
6629 an integral mode that we can fit in a HOST_WIDE_INT,
6630 we must mask only the number of bits in the bitfield,
6631 since this is done implicitly by the constructor. If
6632 the bitfield does not meet either of those conditions,
6633 we can't do this optimization. */
6634 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6635 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6636 == MODE_INT)
6637 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6638 <= HOST_BITS_PER_WIDE_INT))))
6639 {
6640 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6641 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6642 {
6643 HOST_WIDE_INT bitsize
6644 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6645
6646 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6647 {
6648 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6649 op0 = expand_and (op0, op1, target);
6650 }
6651 else
6652 {
6653 enum machine_mode imode
6654 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6655 tree count
6656 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6657 0);
6658
6659 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6660 target, 0);
6661 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6662 target, 0);
6663 }
6664 }
6665
6666 return op0;
6667 }
6668 }
6669
6670 {
6671 enum machine_mode mode1;
6672 HOST_WIDE_INT bitsize, bitpos;
6673 tree offset;
6674 int volatilep = 0;
6675 unsigned int alignment;
6676 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6677 &mode1, &unsignedp, &volatilep,
6678 &alignment);
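/* For instance (a made-up case): for a COMPONENT_REF such as `s.f', where F
   is a 32-bit integer laid out 4 bytes into S, TEM is the tree for S and we
   get back bitsize == 32, bitpos == 32, offset == 0 and mode1 == SImode.  */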
6679
6680 /* If we got back the original object, something is wrong. Perhaps
6681 we are evaluating an expression too early. In any event, don't
6682 infinitely recurse. */
6683 if (tem == exp)
6684 abort ();
6685
6686 /* If TEM's type is a union of variable size, pass TARGET to the inner
6687 computation, since it will need a temporary and TARGET is known
6688 to be safe to use. This occurs in unchecked conversion in Ada. */
6689
6690 op0 = expand_expr (tem,
6691 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6692 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6693 != INTEGER_CST)
6694 ? target : NULL_RTX),
6695 VOIDmode,
6696 (modifier == EXPAND_INITIALIZER
6697 || modifier == EXPAND_CONST_ADDRESS)
6698 ? modifier : EXPAND_NORMAL);
6699
6700 /* If this is a constant, put it into a register if it is a
6701 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6702 if (CONSTANT_P (op0))
6703 {
6704 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6705 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6706 && offset == 0)
6707 op0 = force_reg (mode, op0);
6708 else
6709 op0 = validize_mem (force_const_mem (mode, op0));
6710 }
6711
6712 if (offset != 0)
6713 {
6714 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6715
6716 /* If this object is not in memory, put it into memory.
6717 This case can't occur in C, but can in Ada if we have
6718 unchecked conversion of an expression from a scalar type to
6719 an array or record type. */
6720 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6721 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6722 {
6723 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6724
6725 mark_temp_addr_taken (memloc);
6726 emit_move_insn (memloc, op0);
6727 op0 = memloc;
6728 }
6729
6730 if (GET_CODE (op0) != MEM)
6731 abort ();
6732
6733 if (GET_MODE (offset_rtx) != ptr_mode)
6734 {
6735 #ifdef POINTERS_EXTEND_UNSIGNED
6736 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6737 #else
6738 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6739 #endif
6740 }
6741
6742 /* A constant address in OP0 can have VOIDmode; we must not try
6743 to call force_reg for it, so avoid that case. */
6744 if (GET_CODE (op0) == MEM
6745 && GET_MODE (op0) == BLKmode
6746 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6747 && bitsize != 0
6748 && (bitpos % bitsize) == 0
6749 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6750 && alignment == GET_MODE_ALIGNMENT (mode1))
6751 {
6752 rtx temp = change_address (op0, mode1,
6753 plus_constant (XEXP (op0, 0),
6754 (bitpos /
6755 BITS_PER_UNIT)));
6756 if (GET_CODE (XEXP (temp, 0)) == REG)
6757 op0 = temp;
6758 else
6759 op0 = change_address (op0, mode1,
6760 force_reg (GET_MODE (XEXP (temp, 0)),
6761 XEXP (temp, 0)));
6762 bitpos = 0;
6763 }
6764
6765 op0 = change_address (op0, VOIDmode,
6766 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6767 force_reg (ptr_mode,
6768 offset_rtx)));
6769 }
6770
6771 /* Don't forget about volatility even if this is a bitfield. */
6772 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6773 {
6774 op0 = copy_rtx (op0);
6775 MEM_VOLATILE_P (op0) = 1;
6776 }
6777
6778 /* Check the access. */
6779 if (cfun != 0 && current_function_check_memory_usage
6780 && GET_CODE (op0) == MEM)
6781 {
6782 enum memory_use_mode memory_usage;
6783 memory_usage = get_memory_usage_from_modifier (modifier);
6784
6785 if (memory_usage != MEMORY_USE_DONT)
6786 {
6787 rtx to;
6788 int size;
6789
6790 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6791 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6792
6793 /* Check the access right of the pointer. */
6794 in_check_memory_usage = 1;
6795 if (size > BITS_PER_UNIT)
6796 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6797 to, Pmode,
6798 GEN_INT (size / BITS_PER_UNIT),
6799 TYPE_MODE (sizetype),
6800 GEN_INT (memory_usage),
6801 TYPE_MODE (integer_type_node));
6802 in_check_memory_usage = 0;
6803 }
6804 }
6805
6806 /* In cases where an aligned union has an unaligned object
6807 as a field, we might be extracting a BLKmode value from
6808 an integer-mode (e.g., SImode) object. Handle this case
6809 by doing the extract into an object as wide as the field
6810 (which we know to be the width of a basic mode), then
6811 storing into memory, and changing the mode to BLKmode.
6812 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6813 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6814 if (mode1 == VOIDmode
6815 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6816 || (modifier != EXPAND_CONST_ADDRESS
6817 && modifier != EXPAND_INITIALIZER
6818 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6819 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6820 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6821 /* If the field isn't aligned enough to fetch as a memref,
6822 fetch it as a bit field. */
6823 || (mode1 != BLKmode
6824 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6825 && ((TYPE_ALIGN (TREE_TYPE (tem))
6826 < GET_MODE_ALIGNMENT (mode))
6827 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6828 /* If the type and the field are a constant size and the
6829 size of the type isn't the same size as the bitfield,
6830 we must use bitfield operations. */
6831 || ((bitsize >= 0
6832 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6833 == INTEGER_CST)
6834 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6835 bitsize)))))
6836 || (modifier != EXPAND_CONST_ADDRESS
6837 && modifier != EXPAND_INITIALIZER
6838 && mode == BLKmode
6839 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6840 && (TYPE_ALIGN (type) > alignment
6841 || bitpos % TYPE_ALIGN (type) != 0)))
6842 {
6843 enum machine_mode ext_mode = mode;
6844
6845 if (ext_mode == BLKmode
6846 && ! (target != 0 && GET_CODE (op0) == MEM
6847 && GET_CODE (target) == MEM
6848 && bitpos % BITS_PER_UNIT == 0))
6849 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6850
6851 if (ext_mode == BLKmode)
6852 {
6853 /* In this case, BITPOS must start at a byte boundary and
6854 TARGET, if specified, must be a MEM. */
6855 if (GET_CODE (op0) != MEM
6856 || (target != 0 && GET_CODE (target) != MEM)
6857 || bitpos % BITS_PER_UNIT != 0)
6858 abort ();
6859
6860 op0 = change_address (op0, VOIDmode,
6861 plus_constant (XEXP (op0, 0),
6862 bitpos / BITS_PER_UNIT));
6863 if (target == 0)
6864 target = assign_temp (type, 0, 1, 1);
6865
6866 emit_block_move (target, op0,
6867 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6868 / BITS_PER_UNIT),
6869 BITS_PER_UNIT);
6870
6871 return target;
6872 }
6873
6874 op0 = validize_mem (op0);
6875
6876 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6877 mark_reg_pointer (XEXP (op0, 0), alignment);
6878
6879 op0 = extract_bit_field (op0, bitsize, bitpos,
6880 unsignedp, target, ext_mode, ext_mode,
6881 alignment,
6882 int_size_in_bytes (TREE_TYPE (tem)));
6883
6884 /* If the result is a record type and BITSIZE is narrower than
6885 the mode of OP0, an integral mode, and this is a big endian
6886 machine, we must put the field into the high-order bits. */
6887 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6888 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6889 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6890 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6891 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6892 - bitsize),
6893 op0, 1);
6894
6895 if (mode == BLKmode)
6896 {
6897 rtx new = assign_stack_temp (ext_mode,
6898 bitsize / BITS_PER_UNIT, 0);
6899
6900 emit_move_insn (new, op0);
6901 op0 = copy_rtx (new);
6902 PUT_MODE (op0, BLKmode);
6903 MEM_SET_IN_STRUCT_P (op0, 1);
6904 }
6905
6906 return op0;
6907 }
6908
6909 /* If the result is BLKmode, use that to access the object
6910 now as well. */
6911 if (mode == BLKmode)
6912 mode1 = BLKmode;
6913
6914 /* Get a reference to just this component. */
6915 if (modifier == EXPAND_CONST_ADDRESS
6916 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6917 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6918 (bitpos / BITS_PER_UNIT)));
6919 else
6920 op0 = change_address (op0, mode1,
6921 plus_constant (XEXP (op0, 0),
6922 (bitpos / BITS_PER_UNIT)));
6923
6924 set_mem_attributes (op0, exp, 0);
6925 if (GET_CODE (XEXP (op0, 0)) == REG)
6926 mark_reg_pointer (XEXP (op0, 0), alignment);
6927
6928 MEM_VOLATILE_P (op0) |= volatilep;
6929 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6930 || modifier == EXPAND_CONST_ADDRESS
6931 || modifier == EXPAND_INITIALIZER)
6932 return op0;
6933 else if (target == 0)
6934 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6935
6936 convert_move (target, op0, unsignedp);
6937 return target;
6938 }
6939
6940 /* Intended for a reference to a buffer of a file-object in Pascal.
6941 But it's not certain that a special tree code will really be
6942 necessary for these. INDIRECT_REF might work for them. */
6943 case BUFFER_REF:
6944 abort ();
6945
6946 case IN_EXPR:
6947 {
6948 /* Pascal set IN expression.
6949
6950 Algorithm:
6951 rlo = set_low - (set_low%bits_per_word);
6952 the_word = set [ (index - rlo)/bits_per_word ];
6953 bit_index = index % bits_per_word;
6954 bitmask = 1 << bit_index;
6955 return !!(the_word & bitmask); */
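/* Worked example (numbers chosen only for illustration, with
   bits_per_word == 8): for set_low == 3 and index == 13,

       rlo       = 3 - (3 % 8)       = 0
       the_word  = set[(13 - 0) / 8] = set[1]
       bit_index = 13 % 8            = 5
       bitmask   = 1 << 5            = 32

   so the result is bit 5 of the second byte of the set.  */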
6956
6957 tree set = TREE_OPERAND (exp, 0);
6958 tree index = TREE_OPERAND (exp, 1);
6959 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6960 tree set_type = TREE_TYPE (set);
6961 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6962 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6963 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6964 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6965 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6966 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6967 rtx setaddr = XEXP (setval, 0);
6968 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6969 rtx rlow;
6970 rtx diff, quo, rem, addr, bit, result;
6971
6972 preexpand_calls (exp);
6973
6974 /* If domain is empty, answer is no. Likewise if index is constant
6975 and out of bounds. */
6976 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6977 && TREE_CODE (set_low_bound) == INTEGER_CST
6978 && tree_int_cst_lt (set_high_bound, set_low_bound))
6979 || (TREE_CODE (index) == INTEGER_CST
6980 && TREE_CODE (set_low_bound) == INTEGER_CST
6981 && tree_int_cst_lt (index, set_low_bound))
6982 || (TREE_CODE (set_high_bound) == INTEGER_CST
6983 && TREE_CODE (index) == INTEGER_CST
6984 && tree_int_cst_lt (set_high_bound, index))))
6985 return const0_rtx;
6986
6987 if (target == 0)
6988 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6989
6990 /* If we get here, we have to generate the code for both cases
6991 (in range and out of range). */
6992
6993 op0 = gen_label_rtx ();
6994 op1 = gen_label_rtx ();
6995
6996 if (! (GET_CODE (index_val) == CONST_INT
6997 && GET_CODE (lo_r) == CONST_INT))
6998 {
6999 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7000 GET_MODE (index_val), iunsignedp, 0, op1);
7001 }
7002
7003 if (! (GET_CODE (index_val) == CONST_INT
7004 && GET_CODE (hi_r) == CONST_INT))
7005 {
7006 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7007 GET_MODE (index_val), iunsignedp, 0, op1);
7008 }
7009
7010 /* Calculate the element number of bit zero in the first word
7011 of the set. */
7012 if (GET_CODE (lo_r) == CONST_INT)
7013 rlow = GEN_INT (INTVAL (lo_r)
7014 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7015 else
7016 rlow = expand_binop (index_mode, and_optab, lo_r,
7017 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7018 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7019
7020 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7021 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7022
7023 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7024 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7025 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7026 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7027
7028 addr = memory_address (byte_mode,
7029 expand_binop (index_mode, add_optab, diff,
7030 setaddr, NULL_RTX, iunsignedp,
7031 OPTAB_LIB_WIDEN));
7032
7033 /* Extract the bit we want to examine. */
7034 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7035 gen_rtx_MEM (byte_mode, addr),
7036 make_tree (TREE_TYPE (index), rem),
7037 NULL_RTX, 1);
7038 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7039 GET_MODE (target) == byte_mode ? target : 0,
7040 1, OPTAB_LIB_WIDEN);
7041
7042 if (result != target)
7043 convert_move (target, result, 1);
7044
7045 /* Output the code to handle the out-of-range case. */
7046 emit_jump (op0);
7047 emit_label (op1);
7048 emit_move_insn (target, const0_rtx);
7049 emit_label (op0);
7050 return target;
7051 }
7052
7053 case WITH_CLEANUP_EXPR:
7054 if (RTL_EXPR_RTL (exp) == 0)
7055 {
7056 RTL_EXPR_RTL (exp)
7057 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7058 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7059
7060 /* That's it for this cleanup. */
7061 TREE_OPERAND (exp, 2) = 0;
7062 }
7063 return RTL_EXPR_RTL (exp);
7064
7065 case CLEANUP_POINT_EXPR:
7066 {
7067 /* Start a new binding layer that will keep track of all cleanup
7068 actions to be performed. */
7069 expand_start_bindings (2);
7070
7071 target_temp_slot_level = temp_slot_level;
7072
7073 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7074 /* If we're going to use this value, load it up now. */
7075 if (! ignore)
7076 op0 = force_not_mem (op0);
7077 preserve_temp_slots (op0);
7078 expand_end_bindings (NULL_TREE, 0, 0);
7079 }
7080 return op0;
7081
7082 case CALL_EXPR:
7083 /* Check for a built-in function. */
7084 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7085 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7086 == FUNCTION_DECL)
7087 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7088 return expand_builtin (exp, target, subtarget, tmode, ignore);
7089
7090 /* If this call was expanded already by preexpand_calls,
7091 just return the result we got. */
7092 if (CALL_EXPR_RTL (exp) != 0)
7093 return CALL_EXPR_RTL (exp);
7094
7095 return expand_call (exp, target, ignore);
7096
7097 case NON_LVALUE_EXPR:
7098 case NOP_EXPR:
7099 case CONVERT_EXPR:
7100 case REFERENCE_EXPR:
7101 if (TREE_OPERAND (exp, 0) == error_mark_node)
7102 return const0_rtx;
7103
7104 if (TREE_CODE (type) == UNION_TYPE)
7105 {
7106 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7107
7108 /* If both input and output are BLKmode, this conversion
7109 isn't actually doing anything unless we need to make the
7110 alignment stricter. */
7111 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7112 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7113 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7114 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7115 modifier);
7116
7117 if (target == 0)
7118 {
7119 if (mode != BLKmode)
7120 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7121 else
7122 target = assign_temp (type, 0, 1, 1);
7123 }
7124
7125 if (GET_CODE (target) == MEM)
7126 /* Store data into beginning of memory target. */
7127 store_expr (TREE_OPERAND (exp, 0),
7128 change_address (target, TYPE_MODE (valtype), 0), 0);
7129
7130 else if (GET_CODE (target) == REG)
7131 /* Store this field into a union of the proper type. */
7132 store_field (target,
7133 MIN ((int_size_in_bytes (TREE_TYPE
7134 (TREE_OPERAND (exp, 0)))
7135 * BITS_PER_UNIT),
7136 GET_MODE_BITSIZE (mode)),
7137 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7138 VOIDmode, 0, BITS_PER_UNIT,
7139 int_size_in_bytes (type), 0);
7140 else
7141 abort ();
7142
7143 /* Return the entire union. */
7144 return target;
7145 }
7146
7147 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7148 {
7149 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7150 ro_modifier);
7151
7152 /* If the signedness of the conversion differs and OP0 is
7153 a promoted SUBREG, clear that indication since we now
7154 have to do the proper extension. */
7155 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7156 && GET_CODE (op0) == SUBREG)
7157 SUBREG_PROMOTED_VAR_P (op0) = 0;
7158
7159 return op0;
7160 }
7161
7162 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7163 if (GET_MODE (op0) == mode)
7164 return op0;
7165
7166 /* If OP0 is a constant, just convert it into the proper mode. */
7167 if (CONSTANT_P (op0))
7168 return
7169 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7170 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7171
7172 if (modifier == EXPAND_INITIALIZER)
7173 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7174
7175 if (target == 0)
7176 return
7177 convert_to_mode (mode, op0,
7178 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7179 else
7180 convert_move (target, op0,
7181 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7182 return target;
7183
7184 case PLUS_EXPR:
7185 /* We come here from MINUS_EXPR when the second operand is a
7186 constant. */
7187 plus_expr:
7188 this_optab = add_optab;
7189
7190 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7191 something else, make sure we add the register to the constant and
7192 then to the other thing. This case can occur during strength
7193 reduction and doing it this way will produce better code if the
7194 frame pointer or argument pointer is eliminated.
7195
7196 fold-const.c will ensure that the constant is always in the inner
7197 PLUS_EXPR, so the only case we need to do anything about is if
7198 sp, ap, or fp is our second argument, in which case we must swap
7199 the innermost first argument and our second argument. */
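      /* For example, (x + 3) + fp is rearranged into (fp + 3) + x, so that
	 the register and the constant are combined first.  */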
7200
7201 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7202 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7203 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7204 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7205 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7206 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7207 {
7208 tree t = TREE_OPERAND (exp, 1);
7209
7210 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7211 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7212 }
7213
7214 /* If the result is to be ptr_mode and we are adding an integer to
7215 something, we might be forming a constant. So try to use
7216 plus_constant. If it produces a sum and we can't accept it,
7217 use force_operand. This allows P = &ARR[const] to generate
7218 efficient code on machines where a SYMBOL_REF is not a valid
7219 address.
7220
7221 If this is an EXPAND_SUM call, always return the sum. */
7222 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7223 || mode == ptr_mode)
7224 {
7225 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7226 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7227 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7228 {
7229 rtx constant_part;
7230
7231 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7232 EXPAND_SUM);
7233 /* Use immed_double_const to ensure that the constant is
7234 truncated according to the mode of OP1, then sign extended
7235 to a HOST_WIDE_INT. Using the constant directly can result
7236 in non-canonical RTL in a 64x32 cross compile. */
7237 constant_part
7238 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7239 (HOST_WIDE_INT) 0,
7240 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7241 op1 = plus_constant (op1, INTVAL (constant_part));
7242 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7243 op1 = force_operand (op1, target);
7244 return op1;
7245 }
7246
7247 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7248 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7249 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7250 {
7251 rtx constant_part;
7252
7253 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7254 EXPAND_SUM);
7255 if (! CONSTANT_P (op0))
7256 {
7257 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7258 VOIDmode, modifier);
7259 /* Don't go to both_summands if modifier
7260 says it's not right to return a PLUS. */
7261 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7262 goto binop2;
7263 goto both_summands;
7264 }
7265 /* Use immed_double_const to ensure that the constant is
7266 	       truncated according to the mode of OP0, then sign extended
7267 to a HOST_WIDE_INT. Using the constant directly can result
7268 in non-canonical RTL in a 64x32 cross compile. */
7269 constant_part
7270 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7271 (HOST_WIDE_INT) 0,
7272 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7273 op0 = plus_constant (op0, INTVAL (constant_part));
7274 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7275 op0 = force_operand (op0, target);
7276 return op0;
7277 }
7278 }
7279
7280 /* No sense saving up arithmetic to be done
7281 if it's all in the wrong mode to form part of an address.
7282 And force_operand won't know whether to sign-extend or
7283 zero-extend. */
7284 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7285 || mode != ptr_mode)
7286 goto binop;
7287
7288 preexpand_calls (exp);
7289 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7290 subtarget = 0;
7291
7292 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7293 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7294
7295 both_summands:
7296 /* Make sure any term that's a sum with a constant comes last. */
7297 if (GET_CODE (op0) == PLUS
7298 && CONSTANT_P (XEXP (op0, 1)))
7299 {
7300 temp = op0;
7301 op0 = op1;
7302 op1 = temp;
7303 }
7304 /* If adding to a sum including a constant,
7305 associate it to put the constant outside. */
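      /* For example, A + (X + 12) becomes (A + X) + 12; any constant term
	 already inside A is folded into the 12 as well.  */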
7306 if (GET_CODE (op1) == PLUS
7307 && CONSTANT_P (XEXP (op1, 1)))
7308 {
7309 rtx constant_term = const0_rtx;
7310
7311 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7312 if (temp != 0)
7313 op0 = temp;
7314 /* Ensure that MULT comes first if there is one. */
7315 else if (GET_CODE (op0) == MULT)
7316 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7317 else
7318 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7319
7320 /* Let's also eliminate constants from op0 if possible. */
7321 op0 = eliminate_constant_term (op0, &constant_term);
7322
7323 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7324 their sum should be a constant. Form it into OP1, since the
7325 result we want will then be OP0 + OP1. */
7326
7327 temp = simplify_binary_operation (PLUS, mode, constant_term,
7328 XEXP (op1, 1));
7329 if (temp != 0)
7330 op1 = temp;
7331 else
7332 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7333 }
7334
7335 /* Put a constant term last and put a multiplication first. */
7336 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7337 temp = op1, op1 = op0, op0 = temp;
7338
7339 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7340 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7341
7342 case MINUS_EXPR:
7343 /* For initializers, we are allowed to return a MINUS of two
7344 symbolic constants. Here we handle all cases when both operands
7345 are constant. */
7346 /* Handle difference of two symbolic constants,
7347 for the sake of an initializer. */
7348 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7349 && really_constant_p (TREE_OPERAND (exp, 0))
7350 && really_constant_p (TREE_OPERAND (exp, 1)))
7351 {
7352 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7353 VOIDmode, ro_modifier);
7354 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7355 VOIDmode, ro_modifier);
7356
7357 /* If the last operand is a CONST_INT, use plus_constant of
7358 the negated constant. Else make the MINUS. */
7359 if (GET_CODE (op1) == CONST_INT)
7360 return plus_constant (op0, - INTVAL (op1));
7361 else
7362 return gen_rtx_MINUS (mode, op0, op1);
7363 }
7364 /* Convert A - const to A + (-const). */
7365 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7366 {
7367 tree negated = fold (build1 (NEGATE_EXPR, type,
7368 TREE_OPERAND (exp, 1)));
7369
7370 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7371 /* If we can't negate the constant in TYPE, leave it alone and
7372 expand_binop will negate it for us. We used to try to do it
7373 here in the signed version of TYPE, but that doesn't work
7374 on POINTER_TYPEs. */;
7375 else
7376 {
7377 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7378 goto plus_expr;
7379 }
7380 }
7381 this_optab = sub_optab;
7382 goto binop;
7383
7384 case MULT_EXPR:
7385 preexpand_calls (exp);
7386 /* If first operand is constant, swap them.
7387 Thus the following special case checks need only
7388 check the second operand. */
7389 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7390 {
7391 register tree t1 = TREE_OPERAND (exp, 0);
7392 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7393 TREE_OPERAND (exp, 1) = t1;
7394 }
7395
7396 /* Attempt to return something suitable for generating an
7397 indexed address, for machines that support that. */
7398
7399 if (modifier == EXPAND_SUM && mode == ptr_mode
7400 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7401 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7402 {
7403 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7404 EXPAND_SUM);
7405
7406 /* Apply distributive law if OP0 is x+c. */
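	  /* For example, (x + 4) * 3 becomes (x * 3) + 12.  */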
7407 if (GET_CODE (op0) == PLUS
7408 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7409 return
7410 gen_rtx_PLUS
7411 (mode,
7412 gen_rtx_MULT
7413 (mode, XEXP (op0, 0),
7414 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7415 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7416 * INTVAL (XEXP (op0, 1))));
7417
7418 if (GET_CODE (op0) != REG)
7419 op0 = force_operand (op0, NULL_RTX);
7420 if (GET_CODE (op0) != REG)
7421 op0 = copy_to_mode_reg (mode, op0);
7422
7423 return
7424 gen_rtx_MULT (mode, op0,
7425 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7426 }
7427
7428 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7429 subtarget = 0;
7430
7431 /* Check for multiplying things that have been extended
7432 from a narrower type. If this machine supports multiplying
7433 in that narrower type with a result in the desired type,
7434 do it that way, and avoid the explicit type-conversion. */
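      /* For example, on a target where int is the next wider mode than short,
	 (int) (short) a * (int) (short) b can use a single widening
	 short-by-short multiply, if such an instruction exists.  */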
7435 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7436 && TREE_CODE (type) == INTEGER_TYPE
7437 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7438 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7439 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7440 && int_fits_type_p (TREE_OPERAND (exp, 1),
7441 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7442 /* Don't use a widening multiply if a shift will do. */
7443 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7444 > HOST_BITS_PER_WIDE_INT)
7445 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7446 ||
7447 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7448 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7449 ==
7450 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7451 /* If both operands are extended, they must either both
7452 be zero-extended or both be sign-extended. */
7453 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7454 ==
7455 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7456 {
7457 enum machine_mode innermode
7458 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7459 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7460 ? smul_widen_optab : umul_widen_optab);
7461 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7462 ? umul_widen_optab : smul_widen_optab);
7463 if (mode == GET_MODE_WIDER_MODE (innermode))
7464 {
7465 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7466 {
7467 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7468 NULL_RTX, VOIDmode, 0);
7469 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7470 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7471 VOIDmode, 0);
7472 else
7473 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7474 NULL_RTX, VOIDmode, 0);
7475 goto binop2;
7476 }
7477 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7478 && innermode == word_mode)
7479 {
7480 rtx htem;
7481 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7482 NULL_RTX, VOIDmode, 0);
7483 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7484 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7485 VOIDmode, 0);
7486 else
7487 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7488 NULL_RTX, VOIDmode, 0);
7489 temp = expand_binop (mode, other_optab, op0, op1, target,
7490 unsignedp, OPTAB_LIB_WIDEN);
7491 htem = expand_mult_highpart_adjust (innermode,
7492 gen_highpart (innermode, temp),
7493 op0, op1,
7494 gen_highpart (innermode, temp),
7495 unsignedp);
7496 emit_move_insn (gen_highpart (innermode, temp), htem);
7497 return temp;
7498 }
7499 }
7500 }
7501 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7502 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7503 return expand_mult (mode, op0, op1, target, unsignedp);
7504
7505 case TRUNC_DIV_EXPR:
7506 case FLOOR_DIV_EXPR:
7507 case CEIL_DIV_EXPR:
7508 case ROUND_DIV_EXPR:
7509 case EXACT_DIV_EXPR:
7510 preexpand_calls (exp);
7511 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7512 subtarget = 0;
7513 /* Possible optimization: compute the dividend with EXPAND_SUM
7514 	   then, if the divisor is constant, we can optimize the case
7515 	   where some terms of the dividend have coefficients divisible by it.  */
7516 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7517 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7518 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7519
7520 case RDIV_EXPR:
7521 this_optab = flodiv_optab;
7522 goto binop;
7523
7524 case TRUNC_MOD_EXPR:
7525 case FLOOR_MOD_EXPR:
7526 case CEIL_MOD_EXPR:
7527 case ROUND_MOD_EXPR:
7528 preexpand_calls (exp);
7529 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7530 subtarget = 0;
7531 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7532 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7533 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7534
7535 case FIX_ROUND_EXPR:
7536 case FIX_FLOOR_EXPR:
7537 case FIX_CEIL_EXPR:
7538 abort (); /* Not used for C. */
7539
7540 case FIX_TRUNC_EXPR:
7541 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7542 if (target == 0)
7543 target = gen_reg_rtx (mode);
7544 expand_fix (target, op0, unsignedp);
7545 return target;
7546
7547 case FLOAT_EXPR:
7548 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7549 if (target == 0)
7550 target = gen_reg_rtx (mode);
7551 /* expand_float can't figure out what to do if FROM has VOIDmode.
7552 So give it the correct mode. With -O, cse will optimize this. */
7553 if (GET_MODE (op0) == VOIDmode)
7554 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7555 op0);
7556 expand_float (target, op0,
7557 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7558 return target;
7559
7560 case NEGATE_EXPR:
7561 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7562 temp = expand_unop (mode, neg_optab, op0, target, 0);
7563 if (temp == 0)
7564 abort ();
7565 return temp;
7566
7567 case ABS_EXPR:
7568 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7569
7570 /* Handle complex values specially. */
7571 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7572 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7573 return expand_complex_abs (mode, op0, target, unsignedp);
7574
7575 /* Unsigned abs is simply the operand. Testing here means we don't
7576 risk generating incorrect code below. */
7577 if (TREE_UNSIGNED (type))
7578 return op0;
7579
7580 return expand_abs (mode, op0, target,
7581 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7582
7583 case MAX_EXPR:
7584 case MIN_EXPR:
7585 target = original_target;
7586 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7587 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7588 || GET_MODE (target) != mode
7589 || (GET_CODE (target) == REG
7590 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7591 target = gen_reg_rtx (mode);
7592 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7593 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7594
7595 /* First try to do it with a special MIN or MAX instruction.
7596 If that does not win, use a conditional jump to select the proper
7597 value. */
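      /* With no such instruction, the fallback below is, for MAX_EXPR:
	 target = op0; if (target >= op1) goto done; target = op1; done:
	 MIN_EXPR jumps on <= instead, using the signedness of the type.  */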
7598 this_optab = (TREE_UNSIGNED (type)
7599 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7600 : (code == MIN_EXPR ? smin_optab : smax_optab));
7601
7602 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7603 OPTAB_WIDEN);
7604 if (temp != 0)
7605 return temp;
7606
7607 /* At this point, a MEM target is no longer useful; we will get better
7608 code without it. */
7609
7610 if (GET_CODE (target) == MEM)
7611 target = gen_reg_rtx (mode);
7612
7613 if (target != op0)
7614 emit_move_insn (target, op0);
7615
7616 op0 = gen_label_rtx ();
7617
7618 /* If this mode is an integer too wide to compare properly,
7619 compare word by word. Rely on cse to optimize constant cases. */
7620 if (GET_MODE_CLASS (mode) == MODE_INT
7621 && ! can_compare_p (GE, mode, ccp_jump))
7622 {
7623 if (code == MAX_EXPR)
7624 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7625 target, op1, NULL_RTX, op0);
7626 else
7627 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7628 op1, target, NULL_RTX, op0);
7629 }
7630 else
7631 {
7632 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7633 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7634 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7635 op0);
7636 }
7637 emit_move_insn (target, op1);
7638 emit_label (op0);
7639 return target;
7640
7641 case BIT_NOT_EXPR:
7642 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7643 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7644 if (temp == 0)
7645 abort ();
7646 return temp;
7647
7648 case FFS_EXPR:
7649 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7650 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7651 if (temp == 0)
7652 abort ();
7653 return temp;
7654
7655 /* ??? Can optimize bitwise operations with one arg constant.
7656 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7657 and (a bitwise1 b) bitwise2 b (etc)
7658 but that is probably not worth while. */
7659
7660 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7661 boolean values when we want in all cases to compute both of them. In
7662 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7663 as actual zero-or-1 values and then bitwise anding. In cases where
7664 there cannot be any side effects, better code would be made by
7665 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7666 how to recognize those cases. */
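    /* Concretely, TRUTH_AND_EXPR falls through to the binop code below:
       both operands are expanded to 0-or-1 values and combined with
       and_optab, so no branches are emitted.  */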
7667
7668 case TRUTH_AND_EXPR:
7669 case BIT_AND_EXPR:
7670 this_optab = and_optab;
7671 goto binop;
7672
7673 case TRUTH_OR_EXPR:
7674 case BIT_IOR_EXPR:
7675 this_optab = ior_optab;
7676 goto binop;
7677
7678 case TRUTH_XOR_EXPR:
7679 case BIT_XOR_EXPR:
7680 this_optab = xor_optab;
7681 goto binop;
7682
7683 case LSHIFT_EXPR:
7684 case RSHIFT_EXPR:
7685 case LROTATE_EXPR:
7686 case RROTATE_EXPR:
7687 preexpand_calls (exp);
7688 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7689 subtarget = 0;
7690 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7691 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7692 unsignedp);
7693
7694 /* Could determine the answer when only additive constants differ. Also,
7695 the addition of one can be handled by changing the condition. */
7696 case LT_EXPR:
7697 case LE_EXPR:
7698 case GT_EXPR:
7699 case GE_EXPR:
7700 case EQ_EXPR:
7701 case NE_EXPR:
7702 case UNORDERED_EXPR:
7703 case ORDERED_EXPR:
7704 case UNLT_EXPR:
7705 case UNLE_EXPR:
7706 case UNGT_EXPR:
7707 case UNGE_EXPR:
7708 case UNEQ_EXPR:
7709 preexpand_calls (exp);
7710 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7711 if (temp != 0)
7712 return temp;
7713
7714 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7715 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7716 && original_target
7717 && GET_CODE (original_target) == REG
7718 && (GET_MODE (original_target)
7719 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7720 {
7721 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7722 VOIDmode, 0);
7723
7724 if (temp != original_target)
7725 temp = copy_to_reg (temp);
7726
7727 op1 = gen_label_rtx ();
7728 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7729 GET_MODE (temp), unsignedp, 0, op1);
7730 emit_move_insn (temp, const1_rtx);
7731 emit_label (op1);
7732 return temp;
7733 }
7734
7735 /* If no set-flag instruction, must generate a conditional
7736 store into a temporary variable. Drop through
7737 and handle this like && and ||. */
7738
7739 case TRUTH_ANDIF_EXPR:
7740 case TRUTH_ORIF_EXPR:
7741 if (! ignore
7742 && (target == 0 || ! safe_from_p (target, exp, 1)
7743 /* Make sure we don't have a hard reg (such as function's return
7744 value) live across basic blocks, if not optimizing. */
7745 || (!optimize && GET_CODE (target) == REG
7746 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7747 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7748
7749 if (target)
7750 emit_clr_insn (target);
7751
7752 op1 = gen_label_rtx ();
7753 jumpifnot (exp, op1);
7754
7755 if (target)
7756 emit_0_to_1_insn (target);
7757
7758 emit_label (op1);
7759 return ignore ? const0_rtx : target;
7760
7761 case TRUTH_NOT_EXPR:
7762 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7763 /* The parser is careful to generate TRUTH_NOT_EXPR
7764 only with operands that are always zero or one. */
7765 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7766 target, 1, OPTAB_LIB_WIDEN);
7767 if (temp == 0)
7768 abort ();
7769 return temp;
7770
7771 case COMPOUND_EXPR:
7772 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7773 emit_queue ();
7774 return expand_expr (TREE_OPERAND (exp, 1),
7775 (ignore ? const0_rtx : target),
7776 VOIDmode, 0);
7777
7778 case COND_EXPR:
7779 /* If we would have a "singleton" (see below) were it not for a
7780 conversion in each arm, bring that conversion back out. */
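      /* For example, X ? (int) (a + b) : (int) a is rewritten as
	 (int) (X ? a + b : a), exposing the "singleton" form handled below.  */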
7781 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7782 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7783 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7784 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7785 {
7786 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7787 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7788
7789 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7790 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7791 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7792 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7793 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7794 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7795 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7796 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7797 return expand_expr (build1 (NOP_EXPR, type,
7798 build (COND_EXPR, TREE_TYPE (true),
7799 TREE_OPERAND (exp, 0),
7800 true, false)),
7801 target, tmode, modifier);
7802 }
7803
7804 {
7805 /* Note that COND_EXPRs whose type is a structure or union
7806 are required to be constructed to contain assignments of
7807 a temporary variable, so that we can evaluate them here
7808 for side effect only. If type is void, we must do likewise. */
7809
7810 /* If an arm of the branch requires a cleanup,
7811 only that cleanup is performed. */
7812
7813 tree singleton = 0;
7814 tree binary_op = 0, unary_op = 0;
7815
7816 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7817 convert it to our mode, if necessary. */
7818 if (integer_onep (TREE_OPERAND (exp, 1))
7819 && integer_zerop (TREE_OPERAND (exp, 2))
7820 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7821 {
7822 if (ignore)
7823 {
7824 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7825 ro_modifier);
7826 return const0_rtx;
7827 }
7828
7829 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7830 if (GET_MODE (op0) == mode)
7831 return op0;
7832
7833 if (target == 0)
7834 target = gen_reg_rtx (mode);
7835 convert_move (target, op0, unsignedp);
7836 return target;
7837 }
7838
7839 /* Check for X ? A + B : A. If we have this, we can copy A to the
7840 output and conditionally add B. Similarly for unary operations.
7841 Don't do this if X has side-effects because those side effects
7842 might affect A or B and the "?" operation is a sequence point in
7843 ANSI. (operand_equal_p tests for side effects.) */
7844
7845 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7846 && operand_equal_p (TREE_OPERAND (exp, 2),
7847 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7848 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7849 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7850 && operand_equal_p (TREE_OPERAND (exp, 1),
7851 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7852 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7853 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7854 && operand_equal_p (TREE_OPERAND (exp, 2),
7855 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7856 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7857 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7858 && operand_equal_p (TREE_OPERAND (exp, 1),
7859 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7860 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7861
7862 /* If we are not to produce a result, we have no target. Otherwise,
7863 if a target was specified use it; it will not be used as an
7864 intermediate target unless it is safe. If no target, use a
7865 temporary. */
7866
7867 if (ignore)
7868 temp = 0;
7869 else if (original_target
7870 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7871 || (singleton && GET_CODE (original_target) == REG
7872 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7873 && original_target == var_rtx (singleton)))
7874 && GET_MODE (original_target) == mode
7875 #ifdef HAVE_conditional_move
7876 && (! can_conditionally_move_p (mode)
7877 || GET_CODE (original_target) == REG
7878 || TREE_ADDRESSABLE (type))
7879 #endif
7880 && ! (GET_CODE (original_target) == MEM
7881 && MEM_VOLATILE_P (original_target)))
7882 temp = original_target;
7883 else if (TREE_ADDRESSABLE (type))
7884 abort ();
7885 else
7886 temp = assign_temp (type, 0, 0, 1);
7887
7888 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7889 do the test of X as a store-flag operation, do this as
7890 A + ((X != 0) << log C). Similarly for other simple binary
7891 operators. Only do for C == 1 if BRANCH_COST is low. */
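	/* For example, X ? A + 4 : A is computed as A + ((X != 0) << 2),
	   and X ? A + 1 : A as A + (X != 0).  */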
7892 if (temp && singleton && binary_op
7893 && (TREE_CODE (binary_op) == PLUS_EXPR
7894 || TREE_CODE (binary_op) == MINUS_EXPR
7895 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7896 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7897 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7898 : integer_onep (TREE_OPERAND (binary_op, 1)))
7899 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7900 {
7901 rtx result;
7902 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7903 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7904 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7905 : xor_optab);
7906
7907 /* If we had X ? A : A + 1, do this as A + (X == 0).
7908
7909 We have to invert the truth value here and then put it
7910 back later if do_store_flag fails. We cannot simply copy
7911 TREE_OPERAND (exp, 0) to another variable and modify that
7912 because invert_truthvalue can modify the tree pointed to
7913 by its argument. */
7914 if (singleton == TREE_OPERAND (exp, 1))
7915 TREE_OPERAND (exp, 0)
7916 = invert_truthvalue (TREE_OPERAND (exp, 0));
7917
7918 result = do_store_flag (TREE_OPERAND (exp, 0),
7919 (safe_from_p (temp, singleton, 1)
7920 ? temp : NULL_RTX),
7921 mode, BRANCH_COST <= 1);
7922
7923 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7924 result = expand_shift (LSHIFT_EXPR, mode, result,
7925 build_int_2 (tree_log2
7926 (TREE_OPERAND
7927 (binary_op, 1)),
7928 0),
7929 (safe_from_p (temp, singleton, 1)
7930 ? temp : NULL_RTX), 0);
7931
7932 if (result)
7933 {
7934 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7935 return expand_binop (mode, boptab, op1, result, temp,
7936 unsignedp, OPTAB_LIB_WIDEN);
7937 }
7938 else if (singleton == TREE_OPERAND (exp, 1))
7939 TREE_OPERAND (exp, 0)
7940 = invert_truthvalue (TREE_OPERAND (exp, 0));
7941 }
7942
7943 do_pending_stack_adjust ();
7944 NO_DEFER_POP;
7945 op0 = gen_label_rtx ();
7946
7947 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7948 {
7949 if (temp != 0)
7950 {
7951 /* If the target conflicts with the other operand of the
7952 binary op, we can't use it. Also, we can't use the target
7953 if it is a hard register, because evaluating the condition
7954 might clobber it. */
7955 if ((binary_op
7956 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7957 || (GET_CODE (temp) == REG
7958 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7959 temp = gen_reg_rtx (mode);
7960 store_expr (singleton, temp, 0);
7961 }
7962 else
7963 expand_expr (singleton,
7964 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7965 if (singleton == TREE_OPERAND (exp, 1))
7966 jumpif (TREE_OPERAND (exp, 0), op0);
7967 else
7968 jumpifnot (TREE_OPERAND (exp, 0), op0);
7969
7970 start_cleanup_deferral ();
7971 if (binary_op && temp == 0)
7972 /* Just touch the other operand. */
7973 expand_expr (TREE_OPERAND (binary_op, 1),
7974 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7975 else if (binary_op)
7976 store_expr (build (TREE_CODE (binary_op), type,
7977 make_tree (type, temp),
7978 TREE_OPERAND (binary_op, 1)),
7979 temp, 0);
7980 else
7981 store_expr (build1 (TREE_CODE (unary_op), type,
7982 make_tree (type, temp)),
7983 temp, 0);
7984 op1 = op0;
7985 }
7986 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7987 comparison operator. If we have one of these cases, set the
7988 output to A, branch on A (cse will merge these two references),
7989 then set the output to FOO. */
7990 else if (temp
7991 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7992 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7993 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7994 TREE_OPERAND (exp, 1), 0)
7995 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7996 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7997 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7998 {
7999 if (GET_CODE (temp) == REG
8000 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8001 temp = gen_reg_rtx (mode);
8002 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8003 jumpif (TREE_OPERAND (exp, 0), op0);
8004
8005 start_cleanup_deferral ();
8006 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8007 op1 = op0;
8008 }
8009 else if (temp
8010 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8011 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8012 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8013 TREE_OPERAND (exp, 2), 0)
8014 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8015 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8016 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8017 {
8018 if (GET_CODE (temp) == REG
8019 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8020 temp = gen_reg_rtx (mode);
8021 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8022 jumpifnot (TREE_OPERAND (exp, 0), op0);
8023
8024 start_cleanup_deferral ();
8025 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8026 op1 = op0;
8027 }
8028 else
8029 {
8030 op1 = gen_label_rtx ();
8031 jumpifnot (TREE_OPERAND (exp, 0), op0);
8032
8033 start_cleanup_deferral ();
8034
8035 /* One branch of the cond can be void, if it never returns. For
8036 example A ? throw : E */
8037 if (temp != 0
8038 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8039 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8040 else
8041 expand_expr (TREE_OPERAND (exp, 1),
8042 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8043 end_cleanup_deferral ();
8044 emit_queue ();
8045 emit_jump_insn (gen_jump (op1));
8046 emit_barrier ();
8047 emit_label (op0);
8048 start_cleanup_deferral ();
8049 if (temp != 0
8050 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8051 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8052 else
8053 expand_expr (TREE_OPERAND (exp, 2),
8054 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8055 }
8056
8057 end_cleanup_deferral ();
8058
8059 emit_queue ();
8060 emit_label (op1);
8061 OK_DEFER_POP;
8062
8063 return temp;
8064 }
8065
8066 case TARGET_EXPR:
8067 {
8068 /* Something needs to be initialized, but we didn't know
8069 where that thing was when building the tree. For example,
8070 it could be the return value of a function, or a parameter
8071 	     to a function which is laid down on the stack, or a temporary
8072 variable which must be passed by reference.
8073
8074 We guarantee that the expression will either be constructed
8075 or copied into our original target. */
8076
8077 tree slot = TREE_OPERAND (exp, 0);
8078 tree cleanups = NULL_TREE;
8079 tree exp1;
8080
8081 if (TREE_CODE (slot) != VAR_DECL)
8082 abort ();
8083
8084 if (! ignore)
8085 target = original_target;
8086
8087 /* Set this here so that if we get a target that refers to a
8088 register variable that's already been used, put_reg_into_stack
8089 knows that it should fix up those uses. */
8090 TREE_USED (slot) = 1;
8091
8092 if (target == 0)
8093 {
8094 if (DECL_RTL (slot) != 0)
8095 {
8096 target = DECL_RTL (slot);
8097 		/* If we have already expanded the slot, don't do
8098 		   it again.  (mrs) */
8099 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8100 return target;
8101 }
8102 else
8103 {
8104 target = assign_temp (type, 2, 0, 1);
8105 /* All temp slots at this level must not conflict. */
8106 preserve_temp_slots (target);
8107 DECL_RTL (slot) = target;
8108 if (TREE_ADDRESSABLE (slot))
8109 {
8110 TREE_ADDRESSABLE (slot) = 0;
8111 mark_addressable (slot);
8112 }
8113
8114 /* Since SLOT is not known to the called function
8115 to belong to its stack frame, we must build an explicit
8116 cleanup. This case occurs when we must build up a reference
8117 to pass the reference as an argument. In this case,
8118 it is very likely that such a reference need not be
8119 built here. */
8120
8121 if (TREE_OPERAND (exp, 2) == 0)
8122 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8123 cleanups = TREE_OPERAND (exp, 2);
8124 }
8125 }
8126 else
8127 {
8128 /* This case does occur, when expanding a parameter which
8129 needs to be constructed on the stack. The target
8130 is the actual stack address that we want to initialize.
8131 The function we call will perform the cleanup in this case. */
8132
8133 /* If we have already assigned it space, use that space,
8134 	       not the target that we were passed in, as our target
8135 parameter is only a hint. */
8136 if (DECL_RTL (slot) != 0)
8137 {
8138 target = DECL_RTL (slot);
8139 		/* If we have already expanded the slot, don't do
8140 		   it again.  (mrs) */
8141 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8142 return target;
8143 }
8144 else
8145 {
8146 DECL_RTL (slot) = target;
8147 /* If we must have an addressable slot, then make sure that
8148 the RTL that we just stored in slot is OK. */
8149 if (TREE_ADDRESSABLE (slot))
8150 {
8151 TREE_ADDRESSABLE (slot) = 0;
8152 mark_addressable (slot);
8153 }
8154 }
8155 }
8156
8157 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8158 /* Mark it as expanded. */
8159 TREE_OPERAND (exp, 1) = NULL_TREE;
8160
8161 store_expr (exp1, target, 0);
8162
8163 expand_decl_cleanup (NULL_TREE, cleanups);
8164
8165 return target;
8166 }
8167
8168 case INIT_EXPR:
8169 {
8170 tree lhs = TREE_OPERAND (exp, 0);
8171 tree rhs = TREE_OPERAND (exp, 1);
8172 tree noncopied_parts = 0;
8173 tree lhs_type = TREE_TYPE (lhs);
8174
8175 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8176 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8177 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8178 TYPE_NONCOPIED_PARTS (lhs_type));
8179 while (noncopied_parts != 0)
8180 {
8181 expand_assignment (TREE_VALUE (noncopied_parts),
8182 TREE_PURPOSE (noncopied_parts), 0, 0);
8183 noncopied_parts = TREE_CHAIN (noncopied_parts);
8184 }
8185 return temp;
8186 }
8187
8188 case MODIFY_EXPR:
8189 {
8190 /* If lhs is complex, expand calls in rhs before computing it.
8191 That's so we don't compute a pointer and save it over a call.
8192 If lhs is simple, compute it first so we can give it as a
8193 	 target if the rhs is just a call. This avoids an extra temp and copy,
8194 	 and prevents a partial subsumption which makes bad code.
8195 Actually we could treat component_ref's of vars like vars. */
8196
8197 tree lhs = TREE_OPERAND (exp, 0);
8198 tree rhs = TREE_OPERAND (exp, 1);
8199 tree noncopied_parts = 0;
8200 tree lhs_type = TREE_TYPE (lhs);
8201
8202 temp = 0;
8203
8204 if (TREE_CODE (lhs) != VAR_DECL
8205 && TREE_CODE (lhs) != RESULT_DECL
8206 && TREE_CODE (lhs) != PARM_DECL
8207 && ! (TREE_CODE (lhs) == INDIRECT_REF
8208 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8209 preexpand_calls (exp);
8210
8211 /* Check for |= or &= of a bitfield of size one into another bitfield
8212 of size 1. In this case, (unless we need the result of the
8213 assignment) we can do this more efficiently with a
8214 test followed by an assignment, if necessary.
8215
8216 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8217 things change so we do, this code should be enhanced to
8218 support it. */
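	/* For example, for A.x |= B.y (both one-bit fields) we jump past the
	   store when B.y is zero and otherwise store 1 into A.x; for
	   A.x &= B.y we jump past the store when B.y is nonzero and
	   otherwise store 0.  */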
8219 if (ignore
8220 && TREE_CODE (lhs) == COMPONENT_REF
8221 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8222 || TREE_CODE (rhs) == BIT_AND_EXPR)
8223 && TREE_OPERAND (rhs, 0) == lhs
8224 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8225 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8226 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8227 {
8228 rtx label = gen_label_rtx ();
8229
8230 do_jump (TREE_OPERAND (rhs, 1),
8231 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8232 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8233 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8234 (TREE_CODE (rhs) == BIT_IOR_EXPR
8235 ? integer_one_node
8236 : integer_zero_node)),
8237 0, 0);
8238 do_pending_stack_adjust ();
8239 emit_label (label);
8240 return const0_rtx;
8241 }
8242
8243 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8244 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8245 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8246 TYPE_NONCOPIED_PARTS (lhs_type));
8247
8248 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8249 while (noncopied_parts != 0)
8250 {
8251 expand_assignment (TREE_PURPOSE (noncopied_parts),
8252 TREE_VALUE (noncopied_parts), 0, 0);
8253 noncopied_parts = TREE_CHAIN (noncopied_parts);
8254 }
8255 return temp;
8256 }
8257
8258 case RETURN_EXPR:
8259 if (!TREE_OPERAND (exp, 0))
8260 expand_null_return ();
8261 else
8262 expand_return (TREE_OPERAND (exp, 0));
8263 return const0_rtx;
8264
8265 case PREINCREMENT_EXPR:
8266 case PREDECREMENT_EXPR:
8267 return expand_increment (exp, 0, ignore);
8268
8269 case POSTINCREMENT_EXPR:
8270 case POSTDECREMENT_EXPR:
8271 /* Faster to treat as pre-increment if result is not used. */
8272 return expand_increment (exp, ! ignore, ignore);
8273
8274 case ADDR_EXPR:
8275 /* If nonzero, TEMP will be set to the address of something that might
8276 be a MEM corresponding to a stack slot. */
8277 temp = 0;
8278
8279 /* Are we taking the address of a nested function? */
8280 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8281 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8282 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8283 && ! TREE_STATIC (exp))
8284 {
8285 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8286 op0 = force_operand (op0, target);
8287 }
8288 /* If we are taking the address of something erroneous, just
8289 return a zero. */
8290 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8291 return const0_rtx;
8292 else
8293 {
8294 /* We make sure to pass const0_rtx down if we came in with
8295 ignore set, to avoid doing the cleanups twice for something. */
8296 op0 = expand_expr (TREE_OPERAND (exp, 0),
8297 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8298 (modifier == EXPAND_INITIALIZER
8299 ? modifier : EXPAND_CONST_ADDRESS));
8300
8301 /* If we are going to ignore the result, OP0 will have been set
8302 to const0_rtx, so just return it. Don't get confused and
8303 think we are taking the address of the constant. */
8304 if (ignore)
8305 return op0;
8306
8307 op0 = protect_from_queue (op0, 0);
8308
8309 /* We would like the object in memory. If it is a constant, we can
8310 have it be statically allocated into memory. For a non-constant,
8311 we need to allocate some memory and store the value into it. */
8312
8313 if (CONSTANT_P (op0))
8314 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8315 op0);
8316 else if (GET_CODE (op0) == MEM)
8317 {
8318 mark_temp_addr_taken (op0);
8319 temp = XEXP (op0, 0);
8320 }
8321
8322 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8323 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8324 {
8325 	      /* If this object is in a register, it must not
8326 		 be BLKmode.  */
8327 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8328 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8329
8330 mark_temp_addr_taken (memloc);
8331 emit_move_insn (memloc, op0);
8332 op0 = memloc;
8333 }
8334
8335 if (GET_CODE (op0) != MEM)
8336 abort ();
8337
8338 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8339 {
8340 temp = XEXP (op0, 0);
8341 #ifdef POINTERS_EXTEND_UNSIGNED
8342 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8343 && mode == ptr_mode)
8344 temp = convert_memory_address (ptr_mode, temp);
8345 #endif
8346 return temp;
8347 }
8348
8349 op0 = force_operand (XEXP (op0, 0), target);
8350 }
8351
8352 if (flag_force_addr && GET_CODE (op0) != REG)
8353 op0 = force_reg (Pmode, op0);
8354
8355 if (GET_CODE (op0) == REG
8356 && ! REG_USERVAR_P (op0))
8357 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8358
8359 /* If we might have had a temp slot, add an equivalent address
8360 for it. */
8361 if (temp != 0)
8362 update_temp_slot_address (temp, op0);
8363
8364 #ifdef POINTERS_EXTEND_UNSIGNED
8365 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8366 && mode == ptr_mode)
8367 op0 = convert_memory_address (ptr_mode, op0);
8368 #endif
8369
8370 return op0;
8371
8372 case ENTRY_VALUE_EXPR:
8373 abort ();
8374
8375 /* COMPLEX type for Extended Pascal & Fortran */
8376 case COMPLEX_EXPR:
8377 {
8378 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8379 rtx insns;
8380
8381 /* Get the rtx code of the operands. */
8382 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8383 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8384
8385 if (! target)
8386 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8387
8388 start_sequence ();
8389
8390 /* Move the real (op0) and imaginary (op1) parts to their location. */
8391 emit_move_insn (gen_realpart (mode, target), op0);
8392 emit_move_insn (gen_imagpart (mode, target), op1);
8393
8394 insns = get_insns ();
8395 end_sequence ();
8396
8397 /* Complex construction should appear as a single unit. */
8398 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8399 each with a separate pseudo as destination.
8400 It's not correct for flow to treat them as a unit. */
8401 if (GET_CODE (target) != CONCAT)
8402 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8403 else
8404 emit_insns (insns);
8405
8406 return target;
8407 }
8408
8409 case REALPART_EXPR:
8410 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8411 return gen_realpart (mode, op0);
8412
8413 case IMAGPART_EXPR:
8414 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8415 return gen_imagpart (mode, op0);
8416
8417 case CONJ_EXPR:
8418 {
8419 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8420 rtx imag_t;
8421 rtx insns;
8422
8423 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8424
8425 if (! target)
8426 target = gen_reg_rtx (mode);
8427
8428 start_sequence ();
8429
8430 /* Store the realpart and the negated imagpart to target. */
8431 emit_move_insn (gen_realpart (partmode, target),
8432 gen_realpart (partmode, op0));
8433
8434 imag_t = gen_imagpart (partmode, target);
8435 temp = expand_unop (partmode, neg_optab,
8436 gen_imagpart (partmode, op0), imag_t, 0);
8437 if (temp != imag_t)
8438 emit_move_insn (imag_t, temp);
8439
8440 insns = get_insns ();
8441 end_sequence ();
8442
8443 	/* Conjugate should appear as a single unit.
8444 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8445 each with a separate pseudo as destination.
8446 It's not correct for flow to treat them as a unit. */
8447 if (GET_CODE (target) != CONCAT)
8448 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8449 else
8450 emit_insns (insns);
8451
8452 return target;
8453 }
8454
8455 case TRY_CATCH_EXPR:
8456 {
8457 tree handler = TREE_OPERAND (exp, 1);
8458
8459 expand_eh_region_start ();
8460
8461 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8462
8463 expand_eh_region_end (handler);
8464
8465 return op0;
8466 }
8467
8468 case TRY_FINALLY_EXPR:
8469 {
8470 tree try_block = TREE_OPERAND (exp, 0);
8471 tree finally_block = TREE_OPERAND (exp, 1);
8472 rtx finally_label = gen_label_rtx ();
8473 rtx done_label = gen_label_rtx ();
8474 rtx return_link = gen_reg_rtx (Pmode);
8475 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8476 (tree) finally_label, (tree) return_link);
8477 TREE_SIDE_EFFECTS (cleanup) = 1;
8478
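	/* The cleanup just built is a GOTO_SUBROUTINE_EXPR: whenever the
	   binding contour below is exited, it loads a return address into
	   RETURN_LINK and jumps to FINALLY_LABEL; the finally block runs and
	   then jumps back through RETURN_LINK.  After the bindings end,
	   control jumps over that out-of-line code to DONE_LABEL.  */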
8479 /* Start a new binding layer that will keep track of all cleanup
8480 actions to be performed. */
8481 expand_start_bindings (2);
8482
8483 target_temp_slot_level = temp_slot_level;
8484
8485 expand_decl_cleanup (NULL_TREE, cleanup);
8486 op0 = expand_expr (try_block, target, tmode, modifier);
8487
8488 preserve_temp_slots (op0);
8489 expand_end_bindings (NULL_TREE, 0, 0);
8490 emit_jump (done_label);
8491 emit_label (finally_label);
8492 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8493 emit_indirect_jump (return_link);
8494 emit_label (done_label);
8495 return op0;
8496 }
8497
8498 case GOTO_SUBROUTINE_EXPR:
8499 {
8500 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8501 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8502 rtx return_address = gen_label_rtx ();
8503 emit_move_insn (return_link,
8504 gen_rtx_LABEL_REF (Pmode, return_address));
8505 emit_jump (subr);
8506 emit_label (return_address);
8507 return const0_rtx;
8508 }
8509
8510 case POPDCC_EXPR:
8511 {
8512 rtx dcc = get_dynamic_cleanup_chain ();
8513 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8514 return const0_rtx;
8515 }
8516
8517 case POPDHC_EXPR:
8518 {
8519 rtx dhc = get_dynamic_handler_chain ();
8520 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8521 return const0_rtx;
8522 }
8523
8524 case VA_ARG_EXPR:
8525 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8526
8527 default:
8528 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8529 }
8530
8531 /* Here to do an ordinary binary operator, generating an instruction
8532 from the optab already placed in `this_optab'. */
8533 binop:
8534 preexpand_calls (exp);
8535 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8536 subtarget = 0;
8537 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8538 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8539 binop2:
8540 temp = expand_binop (mode, this_optab, op0, op1, target,
8541 unsignedp, OPTAB_LIB_WIDEN);
8542 if (temp == 0)
8543 abort ();
8544 return temp;
8545 }
8546 \f
8547 /* Similar to expand_expr, except that we don't specify a target, target
8548 mode, or modifier and we return the alignment of the inner type. This is
8549 used in cases where it is not necessary to align the result to the
8550 alignment of its type as long as we know the alignment of the result, for
8551 example for comparisons of BLKmode values. */
8552
8553 static rtx
8554 expand_expr_unaligned (exp, palign)
8555 register tree exp;
8556 unsigned int *palign;
8557 {
8558 register rtx op0;
8559 tree type = TREE_TYPE (exp);
8560 register enum machine_mode mode = TYPE_MODE (type);
8561
8562 /* Default the alignment we return to that of the type. */
8563 *palign = TYPE_ALIGN (type);
8564
8565   /* The only case in which we do anything special is if the resulting mode
8566      is BLKmode.  */
8567 if (mode != BLKmode)
8568 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8569
8570 switch (TREE_CODE (exp))
8571 {
8572 case CONVERT_EXPR:
8573 case NOP_EXPR:
8574 case NON_LVALUE_EXPR:
8575 /* Conversions between BLKmode values don't change the underlying
8576 alignment or value. */
8577 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8578 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8579 break;
8580
8581 case ARRAY_REF:
8582 /* Much of the code for this case is copied directly from expand_expr.
8583 We need to duplicate it here because we will do something different
8584 in the fall-through case, so we need to handle the same exceptions
8585 it does. */
8586 {
8587 tree array = TREE_OPERAND (exp, 0);
8588 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8589 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8590 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8591 HOST_WIDE_INT i;
8592
8593 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8594 abort ();
8595
8596 /* Optimize the special-case of a zero lower bound.
8597
8598 We convert the low_bound to sizetype to avoid some problems
8599 with constant folding. (E.g. suppose the lower bound is 1,
8600 and its mode is QI. Without the conversion, (ARRAY
8601 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8602 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8603
8604 if (! integer_zerop (low_bound))
8605 index = size_diffop (index, convert (sizetype, low_bound));
8606
8607 /* If this is a constant index into a constant array,
8608 just get the value from the array. Handle both the cases when
8609 we have an explicit constructor and when our operand is a variable
8610 that was declared const. */
8611
8612 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8613 && 0 > compare_tree_int (index,
8614 list_length (CONSTRUCTOR_ELTS
8615 (TREE_OPERAND (exp, 0)))))
8616 {
8617 tree elem;
8618
8619 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8620 i = TREE_INT_CST_LOW (index);
8621 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8622 ;
8623
8624 if (elem)
8625 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8626 }
8627
8628 else if (optimize >= 1
8629 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8630 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8631 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8632 {
8633 if (TREE_CODE (index) == INTEGER_CST)
8634 {
8635 tree init = DECL_INITIAL (array);
8636
8637 if (TREE_CODE (init) == CONSTRUCTOR)
8638 {
8639 tree elem;
8640
8641 for (elem = CONSTRUCTOR_ELTS (init);
8642 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8643 elem = TREE_CHAIN (elem))
8644 ;
8645
8646 if (elem)
8647 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8648 palign);
8649 }
8650 }
8651 }
8652 }
8653 /* Fall through. */
8654
8655 case COMPONENT_REF:
8656 case BIT_FIELD_REF:
8657 /* If the operand is a CONSTRUCTOR, we can just extract the
8658 appropriate field if it is present. Don't do this if we have
8659 already written the data since we want to refer to that copy
8660 and varasm.c assumes that's what we'll do. */
8661 if (TREE_CODE (exp) != ARRAY_REF
8662 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8663 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8664 {
8665 tree elt;
8666
8667 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8668 elt = TREE_CHAIN (elt))
8669 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8670 /* Note that unlike the case in expand_expr, we know this is
8671 BLKmode and hence not an integer. */
8672 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8673 }
8674
8675 {
8676 enum machine_mode mode1;
8677 HOST_WIDE_INT bitsize, bitpos;
8678 tree offset;
8679 int volatilep = 0;
8680 unsigned int alignment;
8681 int unsignedp;
8682 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8683 &mode1, &unsignedp, &volatilep,
8684 &alignment);
8685
8686 /* If we got back the original object, something is wrong. Perhaps
8687 we are evaluating an expression too early. In any event, don't
8688 infinitely recurse. */
8689 if (tem == exp)
8690 abort ();
8691
8692 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8693
8694 /* If this is a constant, put it into a register if it is a
8695 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8696 if (CONSTANT_P (op0))
8697 {
8698 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8699
8700 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8701 && offset == 0)
8702 op0 = force_reg (inner_mode, op0);
8703 else
8704 op0 = validize_mem (force_const_mem (inner_mode, op0));
8705 }
8706
8707 if (offset != 0)
8708 {
8709 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8710
8711 /* If this object is in a register, put it into memory.
8712 This case can't occur in C, but can in Ada if we have
8713 unchecked conversion of an expression from a scalar type to
8714 an array or record type. */
8715 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8716 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8717 {
8718 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8719
8720 mark_temp_addr_taken (memloc);
8721 emit_move_insn (memloc, op0);
8722 op0 = memloc;
8723 }
8724
8725 if (GET_CODE (op0) != MEM)
8726 abort ();
8727
8728 if (GET_MODE (offset_rtx) != ptr_mode)
8729 {
8730 #ifdef POINTERS_EXTEND_UNSIGNED
8731 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8732 #else
8733 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8734 #endif
8735 }
8736
8737 op0 = change_address (op0, VOIDmode,
8738 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8739 force_reg (ptr_mode,
8740 offset_rtx)));
8741 }
8742
8743 /* Don't forget about volatility even if this is a bitfield. */
8744 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8745 {
8746 op0 = copy_rtx (op0);
8747 MEM_VOLATILE_P (op0) = 1;
8748 }
8749
8750 /* Check the access. */
8751 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8752 {
8753 rtx to;
8754 int size;
8755
8756 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8757 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8758
8759 /* Check the access right of the pointer. */
8760 in_check_memory_usage = 1;
8761 if (size > BITS_PER_UNIT)
8762 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8763 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8764 TYPE_MODE (sizetype),
8765 GEN_INT (MEMORY_USE_RO),
8766 TYPE_MODE (integer_type_node));
8767 in_check_memory_usage = 0;
8768 }
8769
8770 /* In cases where an aligned union has an unaligned object
8771 as a field, we might be extracting a BLKmode value from
8772 an integer-mode (e.g., SImode) object. Handle this case
8773 by doing the extract into an object as wide as the field
8774 (which we know to be the width of a basic mode), then
8775 storing into memory, and changing the mode to BLKmode.
8776 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8777 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8778 if (mode1 == VOIDmode
8779 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8780 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8781 && (TYPE_ALIGN (type) > alignment
8782 || bitpos % TYPE_ALIGN (type) != 0)))
8783 {
8784 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8785
8786 if (ext_mode == BLKmode)
8787 {
8788 /* In this case, BITPOS must start at a byte boundary. */
8789 if (GET_CODE (op0) != MEM
8790 || bitpos % BITS_PER_UNIT != 0)
8791 abort ();
8792
8793 op0 = change_address (op0, VOIDmode,
8794 plus_constant (XEXP (op0, 0),
8795 bitpos / BITS_PER_UNIT));
8796 }
8797 else
8798 {
8799 rtx new = assign_stack_temp (ext_mode,
8800 bitsize / BITS_PER_UNIT, 0);
8801
8802 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8803 unsignedp, NULL_RTX, ext_mode,
8804 ext_mode, alignment,
8805 int_size_in_bytes (TREE_TYPE (tem)));
8806
8807 /* If the result is a record type and BITSIZE is narrower than
8808 the mode of OP0, an integral mode, and this is a big endian
8809 machine, we must put the field into the high-order bits. */
8810 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8811 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8812 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8813 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8814 size_int (GET_MODE_BITSIZE
8815 (GET_MODE (op0))
8816 - bitsize),
8817 op0, 1);
8818
8819 emit_move_insn (new, op0);
8820 op0 = copy_rtx (new);
8821 PUT_MODE (op0, BLKmode);
8822 }
8823 }
8824 else
8825 /* Get a reference to just this component. */
8826 op0 = change_address (op0, mode1,
8827 plus_constant (XEXP (op0, 0),
8828 (bitpos / BITS_PER_UNIT)));
8829
8830 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8831
8832 /* Adjust the alignment in case the bit position is not
8833 a multiple of the alignment of the inner object. */
8834 while (bitpos % alignment != 0)
8835 alignment >>= 1;
8836
8837 if (GET_CODE (XEXP (op0, 0)) == REG)
8838 mark_reg_pointer (XEXP (op0, 0), alignment);
8839
8840 MEM_IN_STRUCT_P (op0) = 1;
8841 MEM_VOLATILE_P (op0) |= volatilep;
8842
8843 *palign = alignment;
8844 return op0;
8845 }
8846
8847 default:
8848 break;
8849
8850 }
8851
8852 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8853 }
8854 \f
8855 /* Return the tree node if ARG corresponds to a string constant, or zero
8856 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8857 in bytes within the string that ARG is accessing. The type of the
8858 offset will be `sizetype'. */
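/* For example (illustrative only): if ARG is the tree for "hello" + 2,
   i.e. (PLUS_EXPR (ADDR_EXPR (STRING_CST "hello")) 2), we return the
   STRING_CST node and set *PTR_OFFSET to a sizetype 2; for a plain
   (ADDR_EXPR (STRING_CST "hello")) the offset returned is zero.  */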
8859
8860 tree
8861 string_constant (arg, ptr_offset)
8862 tree arg;
8863 tree *ptr_offset;
8864 {
8865 STRIP_NOPS (arg);
8866
8867 if (TREE_CODE (arg) == ADDR_EXPR
8868 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8869 {
8870 *ptr_offset = size_zero_node;
8871 return TREE_OPERAND (arg, 0);
8872 }
8873 else if (TREE_CODE (arg) == PLUS_EXPR)
8874 {
8875 tree arg0 = TREE_OPERAND (arg, 0);
8876 tree arg1 = TREE_OPERAND (arg, 1);
8877
8878 STRIP_NOPS (arg0);
8879 STRIP_NOPS (arg1);
8880
8881 if (TREE_CODE (arg0) == ADDR_EXPR
8882 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8883 {
8884 *ptr_offset = convert (sizetype, arg1);
8885 return TREE_OPERAND (arg0, 0);
8886 }
8887 else if (TREE_CODE (arg1) == ADDR_EXPR
8888 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8889 {
8890 *ptr_offset = convert (sizetype, arg0);
8891 return TREE_OPERAND (arg1, 0);
8892 }
8893 }
8894
8895 return 0;
8896 }
8897 \f
8898 /* Expand code for a post- or pre- increment or decrement
8899 and return the RTX for the result.
8900 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
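/* Thus for a C expression such as `i++' (POST == 1) the rtx returned
   holds the old value of `i', while for `++i' (POST == 0) it holds the
   value after the increment; IGNORE nonzero means the caller wants only
   the side effect, not the value.  */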
8901
8902 static rtx
8903 expand_increment (exp, post, ignore)
8904 register tree exp;
8905 int post, ignore;
8906 {
8907 register rtx op0, op1;
8908 register rtx temp, value;
8909 register tree incremented = TREE_OPERAND (exp, 0);
8910 optab this_optab = add_optab;
8911 int icode;
8912 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8913 int op0_is_copy = 0;
8914 int single_insn = 0;
8915 /* 1 means we can't store into OP0 directly,
8916 because it is a subreg narrower than a word,
8917 and we don't dare clobber the rest of the word. */
8918 int bad_subreg = 0;
8919
8920 /* Stabilize any component ref that might need to be
8921 evaluated more than once below. */
8922 if (!post
8923 || TREE_CODE (incremented) == BIT_FIELD_REF
8924 || (TREE_CODE (incremented) == COMPONENT_REF
8925 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8926 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8927 incremented = stabilize_reference (incremented);
8928 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8929 ones into save exprs so that they don't accidentally get evaluated
8930 more than once by the code below. */
8931 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8932 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8933 incremented = save_expr (incremented);
8934
8935 /* Compute the operands as RTX.
8936 Note whether OP0 is the actual lvalue or a copy of it:
8937 I believe it is a copy iff it is a register or subreg
8938 and insns were generated in computing it. */
8939
8940 temp = get_last_insn ();
8941 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8942
8943 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8944 in place but instead must do sign- or zero-extension during assignment,
8945 so we copy it into a new register and let the code below use it as
8946 a copy.
8947
8948 Note that we can safely modify this SUBREG since it is known not to be
8949 shared (it was made by the expand_expr call above). */
8950
8951 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8952 {
8953 if (post)
8954 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8955 else
8956 bad_subreg = 1;
8957 }
8958 else if (GET_CODE (op0) == SUBREG
8959 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8960 {
8961 /* We cannot increment this SUBREG in place. If we are
8962 post-incrementing, get a copy of the old value. Otherwise,
8963 just mark that we cannot increment in place. */
8964 if (post)
8965 op0 = copy_to_reg (op0);
8966 else
8967 bad_subreg = 1;
8968 }
8969
8970 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8971 && temp != get_last_insn ());
8972 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8973 EXPAND_MEMORY_USE_BAD);
8974
8975 /* Decide whether incrementing or decrementing. */
8976 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8977 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8978 this_optab = sub_optab;
8979
8980 /* Convert decrement by a constant into a negative increment. */
8981 if (this_optab == sub_optab
8982 && GET_CODE (op1) == CONST_INT)
8983 {
8984 op1 = GEN_INT (-INTVAL (op1));
8985 this_optab = add_optab;
8986 }
8987
8988 /* For a preincrement, see if we can do this with a single instruction. */
8989 if (!post)
8990 {
8991 icode = (int) this_optab->handlers[(int) mode].insn_code;
8992 if (icode != (int) CODE_FOR_nothing
8993 /* Make sure that OP0 is valid for operands 0 and 1
8994 of the insn we want to queue. */
8995 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8996 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8997 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8998 single_insn = 1;
8999 }
9000
9001 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9002 then we cannot just increment OP0. We must therefore contrive to
9003 increment the original value. Then, for postincrement, we can return
9004 OP0 since it is a copy of the old value. For preincrement, expand here
9005 unless we can do it with a single insn.
9006
9007 Likewise if storing directly into OP0 would clobber high bits
9008 we need to preserve (bad_subreg). */
9009 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9010 {
9011 /* This is the easiest way to increment the value wherever it is.
9012 Problems with multiple evaluation of INCREMENTED are prevented
9013 because either (1) it is a component_ref or preincrement,
9014 in which case it was stabilized above, or (2) it is an array_ref
9015 with constant index in an array in a register, which is
9016 safe to reevaluate. */
9017 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9018 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9019 ? MINUS_EXPR : PLUS_EXPR),
9020 TREE_TYPE (exp),
9021 incremented,
9022 TREE_OPERAND (exp, 1));
9023
9024 while (TREE_CODE (incremented) == NOP_EXPR
9025 || TREE_CODE (incremented) == CONVERT_EXPR)
9026 {
9027 newexp = convert (TREE_TYPE (incremented), newexp);
9028 incremented = TREE_OPERAND (incremented, 0);
9029 }
9030
9031 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9032 return post ? op0 : temp;
9033 }
9034
9035 if (post)
9036 {
9037 /* We have a true reference to the value in OP0.
9038 If there is an insn to add or subtract in this mode, queue it.
9039 Queueing the increment insn avoids the register shuffling
9040 that often results if we must increment now and first save
9041 the old value for subsequent use. */
9042
9043 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9044 op0 = stabilize (op0);
9045 #endif
9046
9047 icode = (int) this_optab->handlers[(int) mode].insn_code;
9048 if (icode != (int) CODE_FOR_nothing
9049 /* Make sure that OP0 is valid for operands 0 and 1
9050 of the insn we want to queue. */
9051 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9052 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9053 {
9054 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9055 op1 = force_reg (mode, op1);
9056
9057 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9058 }
9059 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9060 {
9061 rtx addr = (general_operand (XEXP (op0, 0), mode)
9062 ? force_reg (Pmode, XEXP (op0, 0))
9063 : copy_to_reg (XEXP (op0, 0)));
9064 rtx temp, result;
9065
9066 op0 = change_address (op0, VOIDmode, addr);
9067 temp = force_reg (GET_MODE (op0), op0);
9068 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9069 op1 = force_reg (mode, op1);
9070
9071 /* The increment queue is LIFO, thus we have to `queue'
9072 the instructions in reverse order. */
9073 enqueue_insn (op0, gen_move_insn (op0, temp));
9074 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9075 return result;
9076 }
9077 }
9078
9079 /* Preincrement, or we can't increment with one simple insn. */
9080 if (post)
9081 /* Save a copy of the value before inc or dec, to return it later. */
9082 temp = value = copy_to_reg (op0);
9083 else
9084 /* Arrange to return the incremented value. */
9085 /* Copy the rtx because expand_binop will protect from the queue,
9086 and the results of that would be invalid for us to return
9087 if our caller does emit_queue before using our result. */
9088 temp = copy_rtx (value = op0);
9089
9090 /* Increment however we can. */
9091 op1 = expand_binop (mode, this_optab, value, op1,
9092 current_function_check_memory_usage ? NULL_RTX : op0,
9093 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9094 /* Make sure the value is stored into OP0. */
9095 if (op1 != op0)
9096 emit_move_insn (op0, op1);
9097
9098 return temp;
9099 }
9100 \f
9101 /* Expand all function calls contained within EXP, innermost ones first.
9102 But don't look within expressions that have sequence points.
9103 For each CALL_EXPR, record the rtx for its value
9104 in the CALL_EXPR_RTL field. */
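/* For instance, when something like `a + f (b)' is about to be expanded,
   the CALL_EXPR for `f (b)' is expanded here first and its result rtx
   recorded, so that the later expansion of the addition can simply pick
   the value up from CALL_EXPR_RTL rather than emitting the call in the
   middle of the addition's code.  */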
9105
9106 static void
9107 preexpand_calls (exp)
9108 tree exp;
9109 {
9110 register int nops, i;
9111 int class = TREE_CODE_CLASS (TREE_CODE (exp));
9112
9113 if (! do_preexpand_calls)
9114 return;
9115
9116 /* Only expressions and references can contain calls. */
9117
9118 if (! IS_EXPR_CODE_CLASS (class) && class != 'r')
9119 return;
9120
9121 switch (TREE_CODE (exp))
9122 {
9123 case CALL_EXPR:
9124 /* Do nothing if already expanded. */
9125 if (CALL_EXPR_RTL (exp) != 0
9126 /* Do nothing if the call returns a variable-sized object. */
9127 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9128 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9129 /* Do nothing to built-in functions. */
9130 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9131 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9132 == FUNCTION_DECL)
9133 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9134 return;
9135
9136 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9137 return;
9138
9139 case COMPOUND_EXPR:
9140 case COND_EXPR:
9141 case TRUTH_ANDIF_EXPR:
9142 case TRUTH_ORIF_EXPR:
9143 /* If we find one of these, then we can be sure
9144 the adjust will be done for it (since it makes jumps).
9145 Do it now, so that if this is inside an argument
9146 of a function, we don't get the stack adjustment
9147 after some other args have already been pushed. */
9148 do_pending_stack_adjust ();
9149 return;
9150
9151 case BLOCK:
9152 case RTL_EXPR:
9153 case WITH_CLEANUP_EXPR:
9154 case CLEANUP_POINT_EXPR:
9155 case TRY_CATCH_EXPR:
9156 return;
9157
9158 case SAVE_EXPR:
9159 if (SAVE_EXPR_RTL (exp) != 0)
9160 return;
9161
9162 default:
9163 break;
9164 }
9165
9166 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
9167 for (i = 0; i < nops; i++)
9168 if (TREE_OPERAND (exp, i) != 0)
9169 {
9170 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9171 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9172 It doesn't happen before the call is made. */
9173 ;
9174 else
9175 {
9176 class = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9177 if (IS_EXPR_CODE_CLASS (class) || class == 'r')
9178 preexpand_calls (TREE_OPERAND (exp, i));
9179 }
9180 }
9181 }
9182 \f
9183 /* At the start of a function, record that we have no previously-pushed
9184 arguments waiting to be popped. */
9185
9186 void
9187 init_pending_stack_adjust ()
9188 {
9189 pending_stack_adjust = 0;
9190 }
9191
9192 /* When exiting from function, if safe, clear out any pending stack adjust
9193 so the adjustment won't get done.
9194
9195 Note, if the current function calls alloca, then it must have a
9196 frame pointer regardless of the value of flag_omit_frame_pointer. */
9197
9198 void
9199 clear_pending_stack_adjust ()
9200 {
9201 #ifdef EXIT_IGNORE_STACK
9202 if (optimize > 0
9203 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9204 && EXIT_IGNORE_STACK
9205 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9206 && ! flag_inline_functions)
9207 {
9208 stack_pointer_delta -= pending_stack_adjust;
9209 pending_stack_adjust = 0;
9210 }
9211 #endif
9212 }
9213
9214 /* Pop any previously-pushed arguments that have not been popped yet. */
9215
9216 void
9217 do_pending_stack_adjust ()
9218 {
9219 if (inhibit_defer_pop == 0)
9220 {
9221 if (pending_stack_adjust != 0)
9222 adjust_stack (GEN_INT (pending_stack_adjust));
9223 pending_stack_adjust = 0;
9224 }
9225 }
9226 \f
9227 /* Expand conditional expressions. */
9228
9229 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9230 LABEL is an rtx of code CODE_LABEL, in this function and all the
9231 functions here. */
9232
9233 void
9234 jumpifnot (exp, label)
9235 tree exp;
9236 rtx label;
9237 {
9238 do_jump (exp, label, NULL_RTX);
9239 }
9240
9241 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9242
9243 void
9244 jumpif (exp, label)
9245 tree exp;
9246 rtx label;
9247 {
9248 do_jump (exp, NULL_RTX, label);
9249 }
9250
9251 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9252 the result is zero, or IF_TRUE_LABEL if the result is one.
9253 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9254 meaning fall through in that case.
9255
9256 do_jump always does any pending stack adjust except when it does not
9257 actually perform a jump. An example where there is no jump
9258 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9259
9260 This function is responsible for optimizing cases such as
9261 &&, || and comparison operators in EXP. */
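/* As a sketch of the && handling below: for EXP equal to `a && b',
   do_jump first does do_jump (a, if_false_label, NULL_RTX), falling
   through when A is nonzero, and then do_jump (b, if_false_label,
   if_true_label), so B is never evaluated when A is zero.  */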
9262
9263 void
9264 do_jump (exp, if_false_label, if_true_label)
9265 tree exp;
9266 rtx if_false_label, if_true_label;
9267 {
9268 register enum tree_code code = TREE_CODE (exp);
9269 /* Some cases need to create a label to jump to
9270 in order to properly fall through.
9271 These cases set DROP_THROUGH_LABEL nonzero. */
9272 rtx drop_through_label = 0;
9273 rtx temp;
9274 int i;
9275 tree type;
9276 enum machine_mode mode;
9277
9278 #ifdef MAX_INTEGER_COMPUTATION_MODE
9279 check_max_integer_computation_mode (exp);
9280 #endif
9281
9282 emit_queue ();
9283
9284 switch (code)
9285 {
9286 case ERROR_MARK:
9287 break;
9288
9289 case INTEGER_CST:
9290 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9291 if (temp)
9292 emit_jump (temp);
9293 break;
9294
9295 #if 0
9296 /* This is not true with #pragma weak */
9297 case ADDR_EXPR:
9298 /* The address of something can never be zero. */
9299 if (if_true_label)
9300 emit_jump (if_true_label);
9301 break;
9302 #endif
9303
9304 case NOP_EXPR:
9305 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9306 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9307 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9308 goto normal;
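/* Fall through. */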
9309 case CONVERT_EXPR:
9310 /* If we are narrowing the operand, we have to do the compare in the
9311 narrower mode. */
9312 if ((TYPE_PRECISION (TREE_TYPE (exp))
9313 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9314 goto normal;
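/* Fall through. */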
9315 case NON_LVALUE_EXPR:
9316 case REFERENCE_EXPR:
9317 case ABS_EXPR:
9318 case NEGATE_EXPR:
9319 case LROTATE_EXPR:
9320 case RROTATE_EXPR:
9321 /* These cannot change zero->non-zero or vice versa. */
9322 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9323 break;
9324
9325 case WITH_RECORD_EXPR:
9326 /* Put the object on the placeholder list, recurse through our first
9327 operand, and pop the list. */
9328 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9329 placeholder_list);
9330 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9331 placeholder_list = TREE_CHAIN (placeholder_list);
9332 break;
9333
9334 #if 0
9335 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9336 a test, and can be longer if the test is eliminated. */
9337 case PLUS_EXPR:
9338 /* Reduce to minus. */
9339 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9340 TREE_OPERAND (exp, 0),
9341 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9342 TREE_OPERAND (exp, 1))));
9343 /* Process as MINUS. */
9344 #endif
9345
9346 case MINUS_EXPR:
9347 /* Non-zero iff operands of minus differ. */
9348 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9349 TREE_OPERAND (exp, 0),
9350 TREE_OPERAND (exp, 1)),
9351 NE, NE, if_false_label, if_true_label);
9352 break;
9353
9354 case BIT_AND_EXPR:
9355 /* If we are AND'ing with a small constant, do this comparison in the
9356 smallest type that fits. If the machine doesn't have comparisons
9357 that small, it will be converted back to the wider comparison.
9358 This helps if we are testing the sign bit of a narrower object.
9359 combine can't do this for us because it can't know whether a
9360 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
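/* For example, with 32-bit int, `if (x & 0x80)' can be done as a QImode
   test of (unsigned char) (x & 0x80) against zero rather than a full
   SImode comparison, provided the target has a QImode compare pattern.  */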
9361
9362 if (! SLOW_BYTE_ACCESS
9363 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9364 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9365 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9366 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9367 && (type = type_for_mode (mode, 1)) != 0
9368 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9369 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9370 != CODE_FOR_nothing))
9371 {
9372 do_jump (convert (type, exp), if_false_label, if_true_label);
9373 break;
9374 }
9375 goto normal;
9376
9377 case TRUTH_NOT_EXPR:
9378 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9379 break;
9380
9381 case TRUTH_ANDIF_EXPR:
9382 if (if_false_label == 0)
9383 if_false_label = drop_through_label = gen_label_rtx ();
9384 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9385 start_cleanup_deferral ();
9386 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9387 end_cleanup_deferral ();
9388 break;
9389
9390 case TRUTH_ORIF_EXPR:
9391 if (if_true_label == 0)
9392 if_true_label = drop_through_label = gen_label_rtx ();
9393 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9394 start_cleanup_deferral ();
9395 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9396 end_cleanup_deferral ();
9397 break;
9398
9399 case COMPOUND_EXPR:
9400 push_temp_slots ();
9401 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9402 preserve_temp_slots (NULL_RTX);
9403 free_temp_slots ();
9404 pop_temp_slots ();
9405 emit_queue ();
9406 do_pending_stack_adjust ();
9407 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9408 break;
9409
9410 case COMPONENT_REF:
9411 case BIT_FIELD_REF:
9412 case ARRAY_REF:
9413 {
9414 HOST_WIDE_INT bitsize, bitpos;
9415 int unsignedp;
9416 enum machine_mode mode;
9417 tree type;
9418 tree offset;
9419 int volatilep = 0;
9420 unsigned int alignment;
9421
9422 /* Get description of this reference. We don't actually care
9423 about the underlying object here. */
9424 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9425 &unsignedp, &volatilep, &alignment);
9426
9427 type = type_for_size (bitsize, unsignedp);
9428 if (! SLOW_BYTE_ACCESS
9429 && type != 0 && bitsize >= 0
9430 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9431 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9432 != CODE_FOR_nothing))
9433 {
9434 do_jump (convert (type, exp), if_false_label, if_true_label);
9435 break;
9436 }
9437 goto normal;
9438 }
9439
9440 case COND_EXPR:
9441 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9442 if (integer_onep (TREE_OPERAND (exp, 1))
9443 && integer_zerop (TREE_OPERAND (exp, 2)))
9444 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9445
9446 else if (integer_zerop (TREE_OPERAND (exp, 1))
9447 && integer_onep (TREE_OPERAND (exp, 2)))
9448 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9449
9450 else
9451 {
9452 register rtx label1 = gen_label_rtx ();
9453 drop_through_label = gen_label_rtx ();
9454
9455 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9456
9457 start_cleanup_deferral ();
9458 /* Now the THEN-expression. */
9459 do_jump (TREE_OPERAND (exp, 1),
9460 if_false_label ? if_false_label : drop_through_label,
9461 if_true_label ? if_true_label : drop_through_label);
9462 /* In case the do_jump just above never jumps. */
9463 do_pending_stack_adjust ();
9464 emit_label (label1);
9465
9466 /* Now the ELSE-expression. */
9467 do_jump (TREE_OPERAND (exp, 2),
9468 if_false_label ? if_false_label : drop_through_label,
9469 if_true_label ? if_true_label : drop_through_label);
9470 end_cleanup_deferral ();
9471 }
9472 break;
9473
9474 case EQ_EXPR:
9475 {
9476 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9477
9478 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9479 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9480 {
9481 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9482 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9483 do_jump
9484 (fold
9485 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9486 fold (build (EQ_EXPR, TREE_TYPE (exp),
9487 fold (build1 (REALPART_EXPR,
9488 TREE_TYPE (inner_type),
9489 exp0)),
9490 fold (build1 (REALPART_EXPR,
9491 TREE_TYPE (inner_type),
9492 exp1)))),
9493 fold (build (EQ_EXPR, TREE_TYPE (exp),
9494 fold (build1 (IMAGPART_EXPR,
9495 TREE_TYPE (inner_type),
9496 exp0)),
9497 fold (build1 (IMAGPART_EXPR,
9498 TREE_TYPE (inner_type),
9499 exp1)))))),
9500 if_false_label, if_true_label);
9501 }
9502
9503 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9504 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9505
9506 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9507 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9508 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9509 else
9510 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9511 break;
9512 }
9513
9514 case NE_EXPR:
9515 {
9516 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9517
9518 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9519 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9520 {
9521 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9522 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9523 do_jump
9524 (fold
9525 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9526 fold (build (NE_EXPR, TREE_TYPE (exp),
9527 fold (build1 (REALPART_EXPR,
9528 TREE_TYPE (inner_type),
9529 exp0)),
9530 fold (build1 (REALPART_EXPR,
9531 TREE_TYPE (inner_type),
9532 exp1)))),
9533 fold (build (NE_EXPR, TREE_TYPE (exp),
9534 fold (build1 (IMAGPART_EXPR,
9535 TREE_TYPE (inner_type),
9536 exp0)),
9537 fold (build1 (IMAGPART_EXPR,
9538 TREE_TYPE (inner_type),
9539 exp1)))))),
9540 if_false_label, if_true_label);
9541 }
9542
9543 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9544 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9545
9546 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9547 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9548 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9549 else
9550 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9551 break;
9552 }
9553
9554 case LT_EXPR:
9555 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9556 if (GET_MODE_CLASS (mode) == MODE_INT
9557 && ! can_compare_p (LT, mode, ccp_jump))
9558 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9559 else
9560 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9561 break;
9562
9563 case LE_EXPR:
9564 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9565 if (GET_MODE_CLASS (mode) == MODE_INT
9566 && ! can_compare_p (LE, mode, ccp_jump))
9567 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9568 else
9569 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9570 break;
9571
9572 case GT_EXPR:
9573 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9574 if (GET_MODE_CLASS (mode) == MODE_INT
9575 && ! can_compare_p (GT, mode, ccp_jump))
9576 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9577 else
9578 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9579 break;
9580
9581 case GE_EXPR:
9582 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9583 if (GET_MODE_CLASS (mode) == MODE_INT
9584 && ! can_compare_p (GE, mode, ccp_jump))
9585 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9586 else
9587 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9588 break;
9589
9590 case UNORDERED_EXPR:
9591 case ORDERED_EXPR:
9592 {
9593 enum rtx_code cmp, rcmp;
9594 int do_rev;
9595
9596 if (code == UNORDERED_EXPR)
9597 cmp = UNORDERED, rcmp = ORDERED;
9598 else
9599 cmp = ORDERED, rcmp = UNORDERED;
9600 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9601
9602 do_rev = 0;
9603 if (! can_compare_p (cmp, mode, ccp_jump)
9604 && (can_compare_p (rcmp, mode, ccp_jump)
9605 /* If the target doesn't provide either UNORDERED or ORDERED
9606 comparisons, canonicalize on UNORDERED for the library. */
9607 || rcmp == UNORDERED))
9608 do_rev = 1;
9609
9610 if (! do_rev)
9611 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9612 else
9613 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9614 }
9615 break;
9616
9617 {
9618 enum rtx_code rcode1;
9619 enum tree_code tcode2;
9620
9621 case UNLT_EXPR:
9622 rcode1 = UNLT;
9623 tcode2 = LT_EXPR;
9624 goto unordered_bcc;
9625 case UNLE_EXPR:
9626 rcode1 = UNLE;
9627 tcode2 = LE_EXPR;
9628 goto unordered_bcc;
9629 case UNGT_EXPR:
9630 rcode1 = UNGT;
9631 tcode2 = GT_EXPR;
9632 goto unordered_bcc;
9633 case UNGE_EXPR:
9634 rcode1 = UNGE;
9635 tcode2 = GE_EXPR;
9636 goto unordered_bcc;
9637 case UNEQ_EXPR:
9638 rcode1 = UNEQ;
9639 tcode2 = EQ_EXPR;
9640 goto unordered_bcc;
9641
9642 unordered_bcc:
9643 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9644 if (can_compare_p (rcode1, mode, ccp_jump))
9645 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9646 if_true_label);
9647 else
9648 {
9649 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9650 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9651 tree cmp0, cmp1;
9652
9653 /* If the target doesn't support combined unordered
9654 compares, decompose into UNORDERED + comparison. */
9655 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9656 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9657 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9658 do_jump (exp, if_false_label, if_true_label);
9659 }
9660 }
9661 break;
9662
9663 default:
9664 normal:
9665 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9666 #if 0
9667 /* This is not needed any more and causes poor code since it causes
9668 comparisons and tests from non-SI objects to have different code
9669 sequences. */
9670 /* Copy to register to avoid generating bad insns by cse
9671 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9672 if (!cse_not_expected && GET_CODE (temp) == MEM)
9673 temp = copy_to_reg (temp);
9674 #endif
9675 do_pending_stack_adjust ();
9676 /* Do any postincrements in the expression that was tested. */
9677 emit_queue ();
9678
9679 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9680 {
9681 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9682 if (target)
9683 emit_jump (target);
9684 }
9685 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9686 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9687 /* Note swapping the labels gives us not-equal. */
9688 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9689 else if (GET_MODE (temp) != VOIDmode)
9690 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9691 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9692 GET_MODE (temp), NULL_RTX, 0,
9693 if_false_label, if_true_label);
9694 else
9695 abort ();
9696 }
9697
9698 if (drop_through_label)
9699 {
9700 /* If do_jump produces code that might be jumped around,
9701 do any stack adjusts from that code, before the place
9702 where control merges in. */
9703 do_pending_stack_adjust ();
9704 emit_label (drop_through_label);
9705 }
9706 }
9707 \f
9708 /* Given a comparison expression EXP for values too wide to be compared
9709 with one insn, test the comparison and jump to the appropriate label.
9710 The code of EXP is ignored; we always test GT if SWAP is 0,
9711 and LT if SWAP is 1. */
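/* For instance, this handles a comparison of two DImode values on a
   32-bit target with no DImode compare insn: the high-order words are
   compared first, jumping as soon as they decide the result, and the
   low-order words are compared (always unsigned) only when the high
   words are equal.  */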
9712
9713 static void
9714 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9715 tree exp;
9716 int swap;
9717 rtx if_false_label, if_true_label;
9718 {
9719 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9720 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9721 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9722 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9723
9724 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9725 }
9726
9727 /* Compare OP0 with OP1, word at a time, in mode MODE.
9728 UNSIGNEDP says to do unsigned comparison.
9729 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9730
9731 void
9732 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9733 enum machine_mode mode;
9734 int unsignedp;
9735 rtx op0, op1;
9736 rtx if_false_label, if_true_label;
9737 {
9738 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9739 rtx drop_through_label = 0;
9740 int i;
9741
9742 if (! if_true_label || ! if_false_label)
9743 drop_through_label = gen_label_rtx ();
9744 if (! if_true_label)
9745 if_true_label = drop_through_label;
9746 if (! if_false_label)
9747 if_false_label = drop_through_label;
9748
9749 /* Compare a word at a time, high order first. */
9750 for (i = 0; i < nwords; i++)
9751 {
9752 rtx op0_word, op1_word;
9753
9754 if (WORDS_BIG_ENDIAN)
9755 {
9756 op0_word = operand_subword_force (op0, i, mode);
9757 op1_word = operand_subword_force (op1, i, mode);
9758 }
9759 else
9760 {
9761 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9762 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9763 }
9764
9765 /* All but the high-order word must be compared as unsigned. */
9766 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9767 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9768 NULL_RTX, if_true_label);
9769
9770 /* Consider lower words only if these are equal. */
9771 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9772 NULL_RTX, 0, NULL_RTX, if_false_label);
9773 }
9774
9775 if (if_false_label)
9776 emit_jump (if_false_label);
9777 if (drop_through_label)
9778 emit_label (drop_through_label);
9779 }
9780
9781 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9782 with one insn, test the comparison and jump to the appropriate label. */
9783
9784 static void
9785 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9786 tree exp;
9787 rtx if_false_label, if_true_label;
9788 {
9789 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9790 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9791 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9792 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9793 int i;
9794 rtx drop_through_label = 0;
9795
9796 if (! if_false_label)
9797 drop_through_label = if_false_label = gen_label_rtx ();
9798
9799 for (i = 0; i < nwords; i++)
9800 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9801 operand_subword_force (op1, i, mode),
9802 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9803 word_mode, NULL_RTX, 0, if_false_label,
9804 NULL_RTX);
9805
9806 if (if_true_label)
9807 emit_jump (if_true_label);
9808 if (drop_through_label)
9809 emit_label (drop_through_label);
9810 }
9811 \f
9812 /* Jump according to whether OP0 is 0.
9813 We assume that OP0 has an integer mode that is too wide
9814 for the available compare insns. */
9815
9816 void
9817 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9818 rtx op0;
9819 rtx if_false_label, if_true_label;
9820 {
9821 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9822 rtx part;
9823 int i;
9824 rtx drop_through_label = 0;
9825
9826 /* The fastest way of doing this comparison on almost any machine is to
9827 "or" all the words and compare the result. If all have to be loaded
9828 from memory and this is a very wide item, it's possible this may
9829 be slower, but that's highly unlikely. */
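/* E.g. for a DImode OP0 on a 32-bit host this ORs the low and high words
   into one word_mode register and then does a single compare of that
   register against zero.  */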
9830
9831 part = gen_reg_rtx (word_mode);
9832 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9833 for (i = 1; i < nwords && part != 0; i++)
9834 part = expand_binop (word_mode, ior_optab, part,
9835 operand_subword_force (op0, i, GET_MODE (op0)),
9836 part, 1, OPTAB_WIDEN);
9837
9838 if (part != 0)
9839 {
9840 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9841 NULL_RTX, 0, if_false_label, if_true_label);
9842
9843 return;
9844 }
9845
9846 /* If we couldn't do the "or" simply, do this with a series of compares. */
9847 if (! if_false_label)
9848 drop_through_label = if_false_label = gen_label_rtx ();
9849
9850 for (i = 0; i < nwords; i++)
9851 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9852 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9853 if_false_label, NULL_RTX);
9854
9855 if (if_true_label)
9856 emit_jump (if_true_label);
9857
9858 if (drop_through_label)
9859 emit_label (drop_through_label);
9860 }
9861 \f
9862 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9863 (including code to compute the values to be compared)
9864 and set (CC0) according to the result.
9865 The decision as to signed or unsigned comparison must be made by the caller.
9866
9867 We force a stack adjustment unless there are currently
9868 things pushed on the stack that aren't yet used.
9869
9870 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9871 compared.
9872
9873 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9874 size of MODE should be used. */
9875
9876 rtx
9877 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9878 register rtx op0, op1;
9879 enum rtx_code code;
9880 int unsignedp;
9881 enum machine_mode mode;
9882 rtx size;
9883 unsigned int align;
9884 {
9885 rtx tem;
9886
9887 /* If one operand is constant, make it the second one. Only do this
9888 if the other operand is not constant as well. */
9889
9890 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9891 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9892 {
9893 tem = op0;
9894 op0 = op1;
9895 op1 = tem;
9896 code = swap_condition (code);
9897 }
9898
9899 if (flag_force_mem)
9900 {
9901 op0 = force_not_mem (op0);
9902 op1 = force_not_mem (op1);
9903 }
9904
9905 do_pending_stack_adjust ();
9906
9907 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9908 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9909 return tem;
9910
9911 #if 0
9912 /* There's no need to do this now that combine.c can eliminate lots of
9913 sign extensions. This can be less efficient in certain cases on other
9914 machines. */
9915
9916 /* If this is a signed equality comparison, we can do it as an
9917 unsigned comparison since zero-extension is cheaper than sign
9918 extension and comparisons with zero are done as unsigned. This is
9919 the case even on machines that can do fast sign extension, since
9920 zero-extension is easier to combine with other operations than
9921 sign-extension is. If we are comparing against a constant, we must
9922 convert it to what it would look like unsigned. */
9923 if ((code == EQ || code == NE) && ! unsignedp
9924 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9925 {
9926 if (GET_CODE (op1) == CONST_INT
9927 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9928 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9929 unsignedp = 1;
9930 }
9931 #endif
9932
9933 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9934
9935 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9936 }
9937
9938 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9939 The decision as to signed or unsigned comparison must be made by the caller.
9940
9941 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9942 compared.
9943
9944 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9945 size of MODE should be used. */
9946
9947 void
9948 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9949 if_false_label, if_true_label)
9950 register rtx op0, op1;
9951 enum rtx_code code;
9952 int unsignedp;
9953 enum machine_mode mode;
9954 rtx size;
9955 unsigned int align;
9956 rtx if_false_label, if_true_label;
9957 {
9958 rtx tem;
9959 int dummy_true_label = 0;
9960
9961 /* Reverse the comparison if that is safe and we want to jump if it is
9962 false. */
9963 if (! if_true_label && ! FLOAT_MODE_P (mode))
9964 {
9965 if_true_label = if_false_label;
9966 if_false_label = 0;
9967 code = reverse_condition (code);
9968 }
9969
9970 /* If one operand is constant, make it the second one. Only do this
9971 if the other operand is not constant as well. */
9972
9973 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9974 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9975 {
9976 tem = op0;
9977 op0 = op1;
9978 op1 = tem;
9979 code = swap_condition (code);
9980 }
9981
9982 if (flag_force_mem)
9983 {
9984 op0 = force_not_mem (op0);
9985 op1 = force_not_mem (op1);
9986 }
9987
9988 do_pending_stack_adjust ();
9989
9990 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9991 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9992 {
9993 if (tem == const_true_rtx)
9994 {
9995 if (if_true_label)
9996 emit_jump (if_true_label);
9997 }
9998 else
9999 {
10000 if (if_false_label)
10001 emit_jump (if_false_label);
10002 }
10003 return;
10004 }
10005
10006 #if 0
10007 /* There's no need to do this now that combine.c can eliminate lots of
10008 sign extensions. This can be less efficient in certain cases on other
10009 machines. */
10010
10011 /* If this is a signed equality comparison, we can do it as an
10012 unsigned comparison since zero-extension is cheaper than sign
10013 extension and comparisons with zero are done as unsigned. This is
10014 the case even on machines that can do fast sign extension, since
10015 zero-extension is easier to combine with other operations than
10016 sign-extension is. If we are comparing against a constant, we must
10017 convert it to what it would look like unsigned. */
10018 if ((code == EQ || code == NE) && ! unsignedp
10019 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10020 {
10021 if (GET_CODE (op1) == CONST_INT
10022 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10023 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10024 unsignedp = 1;
10025 }
10026 #endif
10027
10028 if (! if_true_label)
10029 {
10030 dummy_true_label = 1;
10031 if_true_label = gen_label_rtx ();
10032 }
10033
10034 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10035 if_true_label);
10036
10037 if (if_false_label)
10038 emit_jump (if_false_label);
10039 if (dummy_true_label)
10040 emit_label (if_true_label);
10041 }
10042
10043 /* Generate code for a comparison expression EXP (including code to compute
10044 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10045 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10046 generated code will drop through.
10047 SIGNED_CODE should be the rtx operation for this comparison for
10048 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10049
10050 We force a stack adjustment unless there are currently
10051 things pushed on the stack that aren't yet used. */
10052
10053 static void
10054 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10055 if_true_label)
10056 register tree exp;
10057 enum rtx_code signed_code, unsigned_code;
10058 rtx if_false_label, if_true_label;
10059 {
10060 unsigned int align0, align1;
10061 register rtx op0, op1;
10062 register tree type;
10063 register enum machine_mode mode;
10064 int unsignedp;
10065 enum rtx_code code;
10066
10067 /* Don't crash if the comparison was erroneous. */
10068 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10069 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10070 return;
10071
10072 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10073 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10074 mode = TYPE_MODE (type);
10075 unsignedp = TREE_UNSIGNED (type);
10076 code = unsignedp ? unsigned_code : signed_code;
10077
10078 #ifdef HAVE_canonicalize_funcptr_for_compare
10079 /* If function pointers need to be "canonicalized" before they can
10080 be reliably compared, then canonicalize them. */
10081 if (HAVE_canonicalize_funcptr_for_compare
10082 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10083 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10084 == FUNCTION_TYPE))
10085 {
10086 rtx new_op0 = gen_reg_rtx (mode);
10087
10088 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10089 op0 = new_op0;
10090 }
10091
10092 if (HAVE_canonicalize_funcptr_for_compare
10093 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10094 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10095 == FUNCTION_TYPE))
10096 {
10097 rtx new_op1 = gen_reg_rtx (mode);
10098
10099 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10100 op1 = new_op1;
10101 }
10102 #endif
10103
10104 /* Do any postincrements in the expression that was tested. */
10105 emit_queue ();
10106
10107 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10108 ((mode == BLKmode)
10109 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10110 MIN (align0, align1),
10111 if_false_label, if_true_label);
10112 }
10113 \f
10114 /* Generate code to calculate EXP using a store-flag instruction
10115 and return an rtx for the result. EXP is either a comparison
10116 or a TRUTH_NOT_EXPR whose operand is a comparison.
10117
10118 If TARGET is nonzero, store the result there if convenient.
10119
10120 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10121 cheap.
10122
10123 Return zero if there is no suitable set-flag instruction
10124 available on this machine.
10125
10126 Once expand_expr has been called on the arguments of the comparison,
10127 we are committed to doing the store flag, since it is not safe to
10128 re-evaluate the expression. We emit the store-flag insn by calling
10129 emit_store_flag, but only expand the arguments if we have a reason
10130 to believe that emit_store_flag will be successful. If we think that
10131 it will, but it isn't, we have to simulate the store-flag with a
10132 set/jump/set sequence. */
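/* For instance, on a target with a suitable scc pattern, `r = (a < b);'
   becomes a compare followed by one store-flag insn leaving 0 or 1 in R;
   with no such pattern, the fallback at the end of this function emits
   `R = 1; compare; branch over the next insn if true; R = 0;' (with the
   two constants swapped when INVERT is set).  */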
10133
10134 static rtx
10135 do_store_flag (exp, target, mode, only_cheap)
10136 tree exp;
10137 rtx target;
10138 enum machine_mode mode;
10139 int only_cheap;
10140 {
10141 enum rtx_code code;
10142 tree arg0, arg1, type;
10143 tree tem;
10144 enum machine_mode operand_mode;
10145 int invert = 0;
10146 int unsignedp;
10147 rtx op0, op1;
10148 enum insn_code icode;
10149 rtx subtarget = target;
10150 rtx result, label;
10151
10152 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10153 result at the end. We can't simply invert the test since it would
10154 have already been inverted if it were valid. This case occurs for
10155 some floating-point comparisons. */
10156
10157 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10158 invert = 1, exp = TREE_OPERAND (exp, 0);
10159
10160 arg0 = TREE_OPERAND (exp, 0);
10161 arg1 = TREE_OPERAND (exp, 1);
10162 type = TREE_TYPE (arg0);
10163 operand_mode = TYPE_MODE (type);
10164 unsignedp = TREE_UNSIGNED (type);
10165
10166 /* We won't bother with BLKmode store-flag operations because it would mean
10167 passing a lot of information to emit_store_flag. */
10168 if (operand_mode == BLKmode)
10169 return 0;
10170
10171 /* We won't bother with store-flag operations involving function pointers
10172 when function pointers must be canonicalized before comparisons. */
10173 #ifdef HAVE_canonicalize_funcptr_for_compare
10174 if (HAVE_canonicalize_funcptr_for_compare
10175 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10176 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10177 == FUNCTION_TYPE))
10178 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10179 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10180 == FUNCTION_TYPE))))
10181 return 0;
10182 #endif
10183
10184 STRIP_NOPS (arg0);
10185 STRIP_NOPS (arg1);
10186
10187 /* Get the rtx comparison code to use. We know that EXP is a comparison
10188 operation of some type. Some comparisons against 1 and -1 can be
10189 converted to comparisons with zero. Do so here so that the tests
10190 below will be aware that we have a comparison with zero. These
10191 tests will not catch constants in the first operand, but constants
10192 are rarely passed as the first operand. */
10193
10194 switch (TREE_CODE (exp))
10195 {
10196 case EQ_EXPR:
10197 code = EQ;
10198 break;
10199 case NE_EXPR:
10200 code = NE;
10201 break;
10202 case LT_EXPR:
10203 if (integer_onep (arg1))
10204 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10205 else
10206 code = unsignedp ? LTU : LT;
10207 break;
10208 case LE_EXPR:
10209 if (! unsignedp && integer_all_onesp (arg1))
10210 arg1 = integer_zero_node, code = LT;
10211 else
10212 code = unsignedp ? LEU : LE;
10213 break;
10214 case GT_EXPR:
10215 if (! unsignedp && integer_all_onesp (arg1))
10216 arg1 = integer_zero_node, code = GE;
10217 else
10218 code = unsignedp ? GTU : GT;
10219 break;
10220 case GE_EXPR:
10221 if (integer_onep (arg1))
10222 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10223 else
10224 code = unsignedp ? GEU : GE;
10225 break;
10226
10227 case UNORDERED_EXPR:
10228 code = UNORDERED;
10229 break;
10230 case ORDERED_EXPR:
10231 code = ORDERED;
10232 break;
10233 case UNLT_EXPR:
10234 code = UNLT;
10235 break;
10236 case UNLE_EXPR:
10237 code = UNLE;
10238 break;
10239 case UNGT_EXPR:
10240 code = UNGT;
10241 break;
10242 case UNGE_EXPR:
10243 code = UNGE;
10244 break;
10245 case UNEQ_EXPR:
10246 code = UNEQ;
10247 break;
10248
10249 default:
10250 abort ();
10251 }
10252
10253 /* Put a constant second. */
10254 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10255 {
10256 tem = arg0; arg0 = arg1; arg1 = tem;
10257 code = swap_condition (code);
10258 }
10259
10260 /* If this is an equality or inequality test of a single bit, we can
10261 do this by shifting the bit being tested to the low-order bit and
10262 masking the result with the constant 1. If the condition was EQ,
10263 we xor it with 1. This does not require an scc insn and is faster
10264 than an scc insn even if we have it. */
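/* E.g. `(x & 8) != 0' is computed as `(x >> 3) & 1', and for the EQ
   case the result is additionally XORed with 1, so neither an scc insn
   nor a conditional branch is needed.  */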
10265
10266 if ((code == NE || code == EQ)
10267 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10268 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10269 {
10270 tree inner = TREE_OPERAND (arg0, 0);
10271 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10272 int ops_unsignedp;
10273
10274 /* If INNER is a right shift of a constant and it plus BITNUM does
10275 not overflow, adjust BITNUM and INNER. */
10276
10277 if (TREE_CODE (inner) == RSHIFT_EXPR
10278 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10279 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10280 && bitnum < TYPE_PRECISION (type)
10281 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10282 bitnum - TYPE_PRECISION (type)))
10283 {
10284 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10285 inner = TREE_OPERAND (inner, 0);
10286 }
10287
10288 /* If we are going to be able to omit the AND below, we must do our
10289 operations as unsigned. If we must use the AND, we have a choice.
10290 Normally unsigned is faster, but for some machines signed is. */
10291 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10292 #ifdef LOAD_EXTEND_OP
10293 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10294 #else
10295 : 1
10296 #endif
10297 );
10298
10299 if (! get_subtarget (subtarget)
10300 || GET_MODE (subtarget) != operand_mode
10301 || ! safe_from_p (subtarget, inner, 1))
10302 subtarget = 0;
10303
10304 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10305
10306 if (bitnum != 0)
10307 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10308 size_int (bitnum), subtarget, ops_unsignedp);
10309
10310 if (GET_MODE (op0) != mode)
10311 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10312
10313 if ((code == EQ && ! invert) || (code == NE && invert))
10314 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10315 ops_unsignedp, OPTAB_LIB_WIDEN);
10316
10317 /* Put the AND last so it can combine with more things. */
10318 if (bitnum != TYPE_PRECISION (type) - 1)
10319 op0 = expand_and (op0, const1_rtx, subtarget);
10320
10321 return op0;
10322 }
10323
10324 /* Now see if we are likely to be able to do this. Return if not. */
10325 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10326 return 0;
10327
10328 icode = setcc_gen_code[(int) code];
10329 if (icode == CODE_FOR_nothing
10330 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10331 {
10332 /* We can only do this if it is one of the special cases that
10333 can be handled without an scc insn. */
10334 if ((code == LT && integer_zerop (arg1))
10335 || (! only_cheap && code == GE && integer_zerop (arg1)))
10336 ;
10337 else if (BRANCH_COST >= 0
10338 && ! only_cheap && (code == NE || code == EQ)
10339 && TREE_CODE (type) != REAL_TYPE
10340 && ((abs_optab->handlers[(int) operand_mode].insn_code
10341 != CODE_FOR_nothing)
10342 || (ffs_optab->handlers[(int) operand_mode].insn_code
10343 != CODE_FOR_nothing)))
10344 ;
10345 else
10346 return 0;
10347 }
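/* Editorial note, not part of the original source: "x < 0" and "x >= 0"
   are special because they only examine the sign bit, so they can be
   expanded without a store-flag (scc) instruction; for a 32-bit int,

     (x < 0)    is   ((unsigned) x) >> 31
     (x >= 0)   is   (((unsigned) x) >> 31) ^ 1

   A minimal sketch of the LT-zero case (hypothetical helper, assuming a
   32-bit int):  */
#if 0
static int
lt_zero_sketch (int x)
{
  return (int) ((unsigned int) x >> 31);	/* 1 iff the sign bit is set */
}
#endif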
10348
10349 preexpand_calls (exp);
10350 if (! get_subtarget (target)
10351 || GET_MODE (subtarget) != operand_mode
10352 || ! safe_from_p (subtarget, arg1, 1))
10353 subtarget = 0;
10354
10355 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10356 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10357
10358 if (target == 0)
10359 target = gen_reg_rtx (mode);
10360
10361 /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is
10362 safe because, if emit_store_flag does anything at all, it will succeed
10363 and OP0 and OP1 will not be used subsequently. */
10364
10365 result = emit_store_flag (target, code,
10366 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10367 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10368 operand_mode, unsignedp, 1);
10369
10370 if (result)
10371 {
10372 if (invert)
10373 result = expand_binop (mode, xor_optab, result, const1_rtx,
10374 result, 0, OPTAB_LIB_WIDEN);
10375 return result;
10376 }
10377
10378 /* If this failed, we have to do this with set/compare/jump/set code. */
10379 if (GET_CODE (target) != REG
10380 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10381 target = gen_reg_rtx (GET_MODE (target));
10382
10383 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10384 result = compare_from_rtx (op0, op1, code, unsignedp,
10385 operand_mode, NULL_RTX, 0);
10386 if (GET_CODE (result) == CONST_INT)
10387 return (((result == const0_rtx && ! invert)
10388 || (result != const0_rtx && invert))
10389 ? const0_rtx : const1_rtx);
10390
10391 label = gen_label_rtx ();
10392 if (bcc_gen_fctn[(int) code] == 0)
10393 abort ();
10394
10395 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10396 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10397 emit_label (label);
10398
10399 return target;
10400 }
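/* Editorial illustration, not part of the original source: when
   emit_store_flag fails, the tail of do_store_flag above materializes the
   flag with an explicit branch ("set/compare/jump/set").  A sketch of the
   equivalent computation, with LT standing in for the comparison code
   (hypothetical helper, illustration only):  */
#if 0
static int
store_flag_fallback_sketch (long op0, long op1, int invert)
{
  int target = invert ? 0 : 1;	/* assume the comparison will hold */
  if (! (op0 < op1))		/* if it does not, store the opposite value */
    target = invert ? 1 : 0;
  return target;
}
#endif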
10401 \f
10402 /* Generate a tablejump instruction (used for switch statements). */
10403
10404 #ifdef HAVE_tablejump
10405
10406 /* INDEX is the value being switched on, with the lowest value
10407 in the table already subtracted.
10408 MODE is its expected mode (needed if INDEX is constant).
10409 RANGE is the length of the jump table.
10410 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10411
10412 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10413 index value is out of range. */
10414
10415 void
10416 do_tablejump (index, mode, range, table_label, default_label)
10417 rtx index, range, table_label, default_label;
10418 enum machine_mode mode;
10419 {
10420 register rtx temp, vector;
10421
10422 /* Do an unsigned comparison (in the proper mode) between the index
10423 expression and the value which represents the length of the range.
10424 Since we just finished subtracting the lower bound of the range
10425 from the index expression, this comparison allows us to simultaneously
10426 check that the original index expression value is both greater than
10427 or equal to the minimum value of the range and less than or equal to
10428 the maximum value of the range. */
10429
10430 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10431 0, default_label);
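/* Editorial note, not part of the original source: because the lower
   bound was already subtracted from INDEX, this single unsigned comparison
   checks both ends of the range.  For instance, with case values 3 .. 7
   the caller passes INDEX = x - 3 and RANGE = 4, and (unsigned) (x - 3) > 4
   is true exactly when x < 3 || x > 7, since an x below 3 wraps around to a
   very large unsigned value.  A sketch of that identity (hypothetical
   helper, illustration only):  */
#if 0
#include <assert.h>
static void
tablejump_range_check_sketch (long x)
{
  assert (((unsigned long) (x - 3) > 4) == (x < 3 || x > 7));
}
#endif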
10432
10433 /* If index is in range, it must fit in Pmode.
10434 Convert to Pmode so we can index with it. */
10435 if (mode != Pmode)
10436 index = convert_to_mode (Pmode, index, 1);
10437
10438 /* Don't let a MEM slip through, because then the INDEX that comes
10439 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10440 and break_out_memory_refs will go to work on it and mess it up. */
10441 #ifdef PIC_CASE_VECTOR_ADDRESS
10442 if (flag_pic && GET_CODE (index) != REG)
10443 index = copy_to_mode_reg (Pmode, index);
10444 #endif
10445
10446 /* If flag_force_addr were to affect this address
10447 it could interfere with the tricky assumptions made
10448 about addresses that contain label-refs,
10449 which may be valid only very near the tablejump itself. */
10450 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10451 GET_MODE_SIZE, because this indicates how large insns are. The other
10452 uses should all be Pmode, because they are addresses. This code
10453 could fail if addresses and insns are not the same size. */
10454 index = gen_rtx_PLUS (Pmode,
10455 gen_rtx_MULT (Pmode, index,
10456 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10457 gen_rtx_LABEL_REF (Pmode, table_label));
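/* Editorial note, not part of the original source: the address built above
   is ordinary array indexing into the dispatch table,
   table_label + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE); with 4-byte
   table entries, entry N lives at table_label + 4 * N.  */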
10458 #ifdef PIC_CASE_VECTOR_ADDRESS
10459 if (flag_pic)
10460 index = PIC_CASE_VECTOR_ADDRESS (index);
10461 else
10462 #endif
10463 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10464 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10465 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10466 RTX_UNCHANGING_P (vector) = 1;
10467 convert_move (temp, vector, 0);
10468
10469 emit_jump_insn (gen_tablejump (temp, table_label));
10470
10471 /* If we are generating PIC code or if the table is PC-relative, the
10472 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10473 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10474 emit_barrier ();
10475 }
10476
10477 #endif /* HAVE_tablejump */