1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "machmode.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "defaults.h"
44 #include "toplev.h"
45 #include "ggc.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 #ifndef ACCUMULATE_OUTGOING_ARGS
50 #define ACCUMULATE_OUTGOING_ARGS 0
51 #endif
52
53 /* Supply a default definition for PUSH_ARGS. */
54 #ifndef PUSH_ARGS
55 #ifdef PUSH_ROUNDING
56 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
57 #else
58 #define PUSH_ARGS 0
59 #endif
60 #endif
61
62 /* Decide whether a function's arguments should be processed
63 from first to last or from last to first.
64
65 They should if the stack and args grow in opposite directions, but
66 only if we have push insns. */
67
68 #ifdef PUSH_ROUNDING
69
70 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
71 #define PUSH_ARGS_REVERSED /* If it's last to first */
72 #endif
73
74 #endif
75
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
79 #else
80 #define STACK_PUSH_CODE PRE_INC
81 #endif
82 #endif
83
84 /* Assume that case vectors are not pc-relative. */
85 #ifndef CASE_VECTOR_PC_RELATIVE
86 #define CASE_VECTOR_PC_RELATIVE 0
87 #endif
88
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
96
97 /* Nonzero to generate code for all the subroutines within an
98 expression before generating the upper levels of the expression.
99 Nowadays this is never zero. */
100 int do_preexpand_calls = 1;
101
102 /* Don't check memory usage, since code is being emitted to check memory
103    usage.  Used when current_function_check_memory_usage is true, to avoid
104 infinite recursion. */
105 static int in_check_memory_usage;
106
107 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
108 static tree placeholder_list = 0;
109
110 /* This structure is used by move_by_pieces to describe the move to
111 be performed. */
112 struct move_by_pieces
113 {
114 rtx to;
115 rtx to_addr;
116 int autinc_to;
117 int explicit_inc_to;
118 int to_struct;
119 int to_readonly;
120 rtx from;
121 rtx from_addr;
122 int autinc_from;
123 int explicit_inc_from;
124 int from_struct;
125 int from_readonly;
126 int len;
127 int offset;
128 int reverse;
129 };
130
131 /* This structure is used by clear_by_pieces to describe the clear to
132 be performed. */
133
134 struct clear_by_pieces
135 {
136 rtx to;
137 rtx to_addr;
138 int autinc_to;
139 int explicit_inc_to;
140 int to_struct;
141 int len;
142 int offset;
143 int reverse;
144 };
145
146 extern struct obstack permanent_obstack;
147
148 static rtx get_push_address PARAMS ((int));
149
150 static rtx enqueue_insn PARAMS ((rtx, rtx));
151 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
152 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
153 struct move_by_pieces *));
154 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
155 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
156 enum machine_mode,
157 struct clear_by_pieces *));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, unsigned int, int));
163 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
164 HOST_WIDE_INT));
165 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
166 HOST_WIDE_INT, enum machine_mode,
167 tree, enum machine_mode, int,
168 unsigned int, HOST_WIDE_INT, int));
169 static enum memory_use_mode
170 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
171 static tree save_noncopied_parts PARAMS ((tree, tree));
172 static tree init_noncopied_parts PARAMS ((tree, tree));
173 static int safe_from_p PARAMS ((rtx, tree, int));
174 static int fixed_type_p PARAMS ((tree));
175 static rtx var_rtx PARAMS ((tree));
176 static int readonly_fields_p PARAMS ((tree));
177 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
178 static rtx expand_increment PARAMS ((tree, int, int));
179 static void preexpand_calls PARAMS ((tree));
180 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
181 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
182 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
183 rtx, rtx));
184 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
185
186 /* Record for each mode whether we can move a register directly to or
187 from an object of that mode in memory. If we can't, we won't try
188 to use that mode directly when accessing a field of that mode. */
189
190 static char direct_load[NUM_MACHINE_MODES];
191 static char direct_store[NUM_MACHINE_MODES];
192
193 /* If a memory-to-memory move would take MOVE_RATIO or more simple
194 move-instruction sequences, we will do a movstr or libcall instead. */
195
196 #ifndef MOVE_RATIO
197 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
198 #define MOVE_RATIO 2
199 #else
200 /* If we are optimizing for space (-Os), cut down the default move ratio.  */
201 #define MOVE_RATIO (optimize_size ? 3 : 15)
202 #endif
203 #endif
204
205 /* This macro is used to determine whether move_by_pieces should be called
206 to perform a structure copy. */
207 #ifndef MOVE_BY_PIECES_P
208 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
209 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
210 #endif
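
/* For orientation, a worked example (a sketch only; the numbers assume a
   hypothetical 32-bit target with MOVE_MAX == 4, no movstrM patterns, and
   no -Os, so MOVE_RATIO is 15): a fully aligned 32-byte copy takes
   32/4 = 8 SImode moves, so MOVE_BY_PIECES_P is true and move_by_pieces
   is used, while a 128-byte copy would take 32 moves, so emit_block_move
   falls back to a string-move pattern or a library call instead.  */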
211
212 /* This array records the insn_code of insns to perform block moves. */
213 enum insn_code movstr_optab[NUM_MACHINE_MODES];
214
215 /* This array records the insn_code of insns to perform block clears. */
216 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
217
218 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
219
220 #ifndef SLOW_UNALIGNED_ACCESS
221 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
222 #endif
223 \f
224 /* This is run once per compilation to set up which modes can be used
225 directly in memory and to initialize the block move optab. */
226
227 void
228 init_expr_once ()
229 {
230 rtx insn, pat;
231 enum machine_mode mode;
232 int num_clobbers;
233 rtx mem, mem1;
234 char *free_point;
235
236 start_sequence ();
237
238 /* Since we are on the permanent obstack, we must be sure we save this
239 spot AFTER we call start_sequence, since it will reuse the rtl it
240 makes. */
241 free_point = (char *) oballoc (0);
242
243 /* Try indexing by frame ptr and try by stack ptr.
244 It is known that on the Convex the stack ptr isn't a valid index.
245 With luck, one or the other is valid on any machine. */
246 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
247 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
248
249 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
250 pat = PATTERN (insn);
251
252 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
253 mode = (enum machine_mode) ((int) mode + 1))
254 {
255 int regno;
256 rtx reg;
257
258 direct_load[(int) mode] = direct_store[(int) mode] = 0;
259 PUT_MODE (mem, mode);
260 PUT_MODE (mem1, mode);
261
262 /* See if there is some register that can be used in this mode and
263 directly loaded or stored from memory. */
264
265 if (mode != VOIDmode && mode != BLKmode)
266 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
267 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
268 regno++)
269 {
270 if (! HARD_REGNO_MODE_OK (regno, mode))
271 continue;
272
273 reg = gen_rtx_REG (mode, regno);
274
275 SET_SRC (pat) = mem;
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
279
280 SET_SRC (pat) = mem1;
281 SET_DEST (pat) = reg;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_load[(int) mode] = 1;
284
285 SET_SRC (pat) = reg;
286 SET_DEST (pat) = mem;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
289
290 SET_SRC (pat) = reg;
291 SET_DEST (pat) = mem1;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_store[(int) mode] = 1;
294 }
295 }
296
297 end_sequence ();
298 obfree (free_point);
299 }
300
301 /* This is run at the start of compiling a function. */
302
303 void
304 init_expr ()
305 {
306 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
307
308 pending_chain = 0;
309 pending_stack_adjust = 0;
310 stack_pointer_delta = 0;
311 inhibit_defer_pop = 0;
312 saveregs_value = 0;
313 apply_args_value = 0;
314 forced_labels = 0;
315 }
316
317 void
318 mark_expr_status (p)
319 struct expr_status *p;
320 {
321 if (p == NULL)
322 return;
323
324 ggc_mark_rtx (p->x_saveregs_value);
325 ggc_mark_rtx (p->x_apply_args_value);
326 ggc_mark_rtx (p->x_forced_labels);
327 }
328
329 void
330 free_expr_status (f)
331 struct function *f;
332 {
333 free (f->expr);
334 f->expr = NULL;
335 }
336
337 /* Small sanity check that the queue is empty at the end of a function. */
338 void
339 finish_expr_for_function ()
340 {
341 if (pending_chain)
342 abort ();
343 }
344 \f
345 /* Manage the queue of increment instructions to be output
346 for POSTINCREMENT_EXPR expressions, etc. */
347
348 /* Queue up to increment (or change) VAR later. BODY says how:
349 BODY should be the same thing you would pass to emit_insn
350 to increment right away. It will go to emit_insn later on.
351
352 The value is a QUEUED expression to be used in place of VAR
353 where you want to guarantee the pre-incrementation value of VAR. */
354
355 static rtx
356 enqueue_insn (var, body)
357 rtx var, body;
358 {
359 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
360 body, pending_chain);
361 return pending_chain;
362 }
363
364 /* Use protect_from_queue to convert a QUEUED expression
365 into something that you can put immediately into an instruction.
366 If the queued incrementation has not happened yet,
367 protect_from_queue returns the variable itself.
368 If the incrementation has happened, protect_from_queue returns a temp
369 that contains a copy of the old value of the variable.
370
371 Any time an rtx which might possibly be a QUEUED is to be put
372 into an instruction, it must be passed through protect_from_queue first.
373 QUEUED expressions are not meaningful in instructions.
374
375 Do not pass a value through protect_from_queue and then hold
376 on to it for a while before putting it in an instruction!
377 If the queue is flushed in between, incorrect code will result. */
378
379 rtx
380 protect_from_queue (x, modify)
381 register rtx x;
382 int modify;
383 {
384 register RTX_CODE code = GET_CODE (x);
385
386 #if 0 /* A QUEUED can hang around after the queue is forced out. */
387 /* Shortcut for most common case. */
388 if (pending_chain == 0)
389 return x;
390 #endif
391
392 if (code != QUEUED)
393 {
394 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
395 use of autoincrement. Make a copy of the contents of the memory
396 location rather than a copy of the address, but not if the value is
397 of mode BLKmode. Don't modify X in place since it might be
398 shared. */
399 if (code == MEM && GET_MODE (x) != BLKmode
400 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
401 {
402 register rtx y = XEXP (x, 0);
403 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
404
405 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
406 MEM_COPY_ATTRIBUTES (new, x);
407 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
408
409 if (QUEUED_INSN (y))
410 {
411 register rtx temp = gen_reg_rtx (GET_MODE (new));
412 emit_insn_before (gen_move_insn (temp, new),
413 QUEUED_INSN (y));
414 return temp;
415 }
416 return new;
417 }
418 /* Otherwise, recursively protect the subexpressions of all
419 the kinds of rtx's that can contain a QUEUED. */
420 if (code == MEM)
421 {
422 rtx tem = protect_from_queue (XEXP (x, 0), 0);
423 if (tem != XEXP (x, 0))
424 {
425 x = copy_rtx (x);
426 XEXP (x, 0) = tem;
427 }
428 }
429 else if (code == PLUS || code == MULT)
430 {
431 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
432 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
433 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
434 {
435 x = copy_rtx (x);
436 XEXP (x, 0) = new0;
437 XEXP (x, 1) = new1;
438 }
439 }
440 return x;
441 }
442 /* If the increment has not happened, use the variable itself. */
443 if (QUEUED_INSN (x) == 0)
444 return QUEUED_VAR (x);
445 /* If the increment has happened and a pre-increment copy exists,
446 use that copy. */
447 if (QUEUED_COPY (x) != 0)
448 return QUEUED_COPY (x);
449 /* The increment has happened but we haven't set up a pre-increment copy.
450 Set one up now, and use it. */
451 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
452 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
453 QUEUED_INSN (x));
454 return QUEUED_COPY (x);
455 }
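
/* A minimal usage sketch (the variable names are only illustrative): any rtx
   that might contain a QUEUED must go through protect_from_queue immediately
   before it is put into an insn, with MODIFY nonzero only for the operand
   that will be stored into:

	to = protect_from_queue (to, 1);
	from = protect_from_queue (from, 0);
	emit_move_insn (to, from);

   The results must not be held across a call to emit_queue, as the comment
   before this function warns.  */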
456
457 /* Return nonzero if X contains a QUEUED expression:
458 if it contains anything that will be altered by a queued increment.
459 We handle only combinations of MEM, PLUS, MINUS and MULT operators
460 since memory addresses generally contain only those. */
461
462 int
463 queued_subexp_p (x)
464 rtx x;
465 {
466 register enum rtx_code code = GET_CODE (x);
467 switch (code)
468 {
469 case QUEUED:
470 return 1;
471 case MEM:
472 return queued_subexp_p (XEXP (x, 0));
473 case MULT:
474 case PLUS:
475 case MINUS:
476 return (queued_subexp_p (XEXP (x, 0))
477 || queued_subexp_p (XEXP (x, 1)));
478 default:
479 return 0;
480 }
481 }
482
483 /* Perform all the pending incrementations. */
484
485 void
486 emit_queue ()
487 {
488 register rtx p;
489 while ((p = pending_chain))
490 {
491 rtx body = QUEUED_BODY (p);
492
493 if (GET_CODE (body) == SEQUENCE)
494 {
495 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
496 emit_insn (QUEUED_BODY (p));
497 }
498 else
499 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
500 pending_chain = QUEUED_NEXT (p);
501 }
502 }
503 \f
504 /* Copy data from FROM to TO, where the machine modes are not the same.
505 Both modes may be integer, or both may be floating.
506 UNSIGNEDP should be nonzero if FROM is an unsigned type.
507 This causes zero-extension instead of sign-extension. */
508
509 void
510 convert_move (to, from, unsignedp)
511 register rtx to, from;
512 int unsignedp;
513 {
514 enum machine_mode to_mode = GET_MODE (to);
515 enum machine_mode from_mode = GET_MODE (from);
516 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
517 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
518 enum insn_code code;
519 rtx libcall;
520
521 /* rtx code for making an equivalent value. */
522 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
523
524 to = protect_from_queue (to, 1);
525 from = protect_from_queue (from, 0);
526
527 if (to_real != from_real)
528 abort ();
529
530 /* If FROM is a SUBREG that indicates that we have already done at least
531 the required extension, strip it. We don't handle such SUBREGs as
532 TO here. */
533
534 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
535 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
536 >= GET_MODE_SIZE (to_mode))
537 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
538 from = gen_lowpart (to_mode, from), from_mode = to_mode;
539
540 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
541 abort ();
542
543 if (to_mode == from_mode
544 || (from_mode == VOIDmode && CONSTANT_P (from)))
545 {
546 emit_move_insn (to, from);
547 return;
548 }
549
550 if (to_real)
551 {
552 rtx value;
553
554 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
555 {
556 /* Try converting directly if the insn is supported. */
557 if ((code = can_extend_p (to_mode, from_mode, 0))
558 != CODE_FOR_nothing)
559 {
560 emit_unop_insn (code, to, from, UNKNOWN);
561 return;
562 }
563 }
564
565 #ifdef HAVE_trunchfqf2
566 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
567 {
568 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
569 return;
570 }
571 #endif
572 #ifdef HAVE_trunctqfqf2
573 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
574 {
575 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
576 return;
577 }
578 #endif
579 #ifdef HAVE_truncsfqf2
580 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
581 {
582 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
583 return;
584 }
585 #endif
586 #ifdef HAVE_truncdfqf2
587 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
588 {
589 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
590 return;
591 }
592 #endif
593 #ifdef HAVE_truncxfqf2
594 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
595 {
596 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
597 return;
598 }
599 #endif
600 #ifdef HAVE_trunctfqf2
601 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
602 {
603 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
604 return;
605 }
606 #endif
607
608 #ifdef HAVE_trunctqfhf2
609 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
610 {
611 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
612 return;
613 }
614 #endif
615 #ifdef HAVE_truncsfhf2
616 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
617 {
618 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
619 return;
620 }
621 #endif
622 #ifdef HAVE_truncdfhf2
623 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
624 {
625 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
626 return;
627 }
628 #endif
629 #ifdef HAVE_truncxfhf2
630 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
631 {
632 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
633 return;
634 }
635 #endif
636 #ifdef HAVE_trunctfhf2
637 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
638 {
639 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
640 return;
641 }
642 #endif
643
644 #ifdef HAVE_truncsftqf2
645 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
646 {
647 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
648 return;
649 }
650 #endif
651 #ifdef HAVE_truncdftqf2
652 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
653 {
654 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
655 return;
656 }
657 #endif
658 #ifdef HAVE_truncxftqf2
659 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
660 {
661 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
662 return;
663 }
664 #endif
665 #ifdef HAVE_trunctftqf2
666 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
667 {
668 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
669 return;
670 }
671 #endif
672
673 #ifdef HAVE_truncdfsf2
674 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
675 {
676 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
677 return;
678 }
679 #endif
680 #ifdef HAVE_truncxfsf2
681 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
682 {
683 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
684 return;
685 }
686 #endif
687 #ifdef HAVE_trunctfsf2
688 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
689 {
690 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694 #ifdef HAVE_truncxfdf2
695 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
696 {
697 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
698 return;
699 }
700 #endif
701 #ifdef HAVE_trunctfdf2
702 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
703 {
704 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
705 return;
706 }
707 #endif
708
709 libcall = (rtx) 0;
710 switch (from_mode)
711 {
712 case SFmode:
713 switch (to_mode)
714 {
715 case DFmode:
716 libcall = extendsfdf2_libfunc;
717 break;
718
719 case XFmode:
720 libcall = extendsfxf2_libfunc;
721 break;
722
723 case TFmode:
724 libcall = extendsftf2_libfunc;
725 break;
726
727 default:
728 break;
729 }
730 break;
731
732 case DFmode:
733 switch (to_mode)
734 {
735 case SFmode:
736 libcall = truncdfsf2_libfunc;
737 break;
738
739 case XFmode:
740 libcall = extenddfxf2_libfunc;
741 break;
742
743 case TFmode:
744 libcall = extenddftf2_libfunc;
745 break;
746
747 default:
748 break;
749 }
750 break;
751
752 case XFmode:
753 switch (to_mode)
754 {
755 case SFmode:
756 libcall = truncxfsf2_libfunc;
757 break;
758
759 case DFmode:
760 libcall = truncxfdf2_libfunc;
761 break;
762
763 default:
764 break;
765 }
766 break;
767
768 case TFmode:
769 switch (to_mode)
770 {
771 case SFmode:
772 libcall = trunctfsf2_libfunc;
773 break;
774
775 case DFmode:
776 libcall = trunctfdf2_libfunc;
777 break;
778
779 default:
780 break;
781 }
782 break;
783
784 default:
785 break;
786 }
787
788 if (libcall == (rtx) 0)
789 /* This conversion is not implemented yet. */
790 abort ();
791
792 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
793 1, from, from_mode);
794 emit_move_insn (to, value);
795 return;
796 }
797
798 /* Now both modes are integers. */
799
800 /* Handle expanding beyond a word. */
801 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
802 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
803 {
804 rtx insns;
805 rtx lowpart;
806 rtx fill_value;
807 rtx lowfrom;
808 int i;
809 enum machine_mode lowpart_mode;
810 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
811
812 /* Try converting directly if the insn is supported. */
813 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
814 != CODE_FOR_nothing)
815 {
816 /* If FROM is a SUBREG, put it into a register. Do this
817 so that we always generate the same set of insns for
818 better cse'ing; if an intermediate assignment occurred,
819 we won't be doing the operation directly on the SUBREG. */
820 if (optimize > 0 && GET_CODE (from) == SUBREG)
821 from = force_reg (from_mode, from);
822 emit_unop_insn (code, to, from, equiv_code);
823 return;
824 }
825 /* Next, try converting via full word. */
826 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
827 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
828 != CODE_FOR_nothing))
829 {
830 if (GET_CODE (to) == REG)
831 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
832 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
833 emit_unop_insn (code, to,
834 gen_lowpart (word_mode, to), equiv_code);
835 return;
836 }
837
838 /* No special multiword conversion insn; do it by hand. */
839 start_sequence ();
840
841 /* Since we will turn this into a no conflict block, we must ensure
842 that the source does not overlap the target. */
843
844 if (reg_overlap_mentioned_p (to, from))
845 from = force_reg (from_mode, from);
846
847 /* Get a copy of FROM widened to a word, if necessary. */
848 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
849 lowpart_mode = word_mode;
850 else
851 lowpart_mode = from_mode;
852
853 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
854
855 lowpart = gen_lowpart (lowpart_mode, to);
856 emit_move_insn (lowpart, lowfrom);
857
858 /* Compute the value to put in each remaining word. */
859 if (unsignedp)
860 fill_value = const0_rtx;
861 else
862 {
863 #ifdef HAVE_slt
864 if (HAVE_slt
865 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
866 && STORE_FLAG_VALUE == -1)
867 {
868 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
869 lowpart_mode, 0, 0);
870 fill_value = gen_reg_rtx (word_mode);
871 emit_insn (gen_slt (fill_value));
872 }
873 else
874 #endif
875 {
876 fill_value
877 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
878 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
879 NULL_RTX, 0);
880 fill_value = convert_to_mode (word_mode, fill_value, 1);
881 }
882 }
883
884 /* Fill the remaining words. */
885 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
886 {
887 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
888 rtx subword = operand_subword (to, index, 1, to_mode);
889
890 if (subword == 0)
891 abort ();
892
893 if (fill_value != subword)
894 emit_move_insn (subword, fill_value);
895 }
896
897 insns = get_insns ();
898 end_sequence ();
899
900 emit_no_conflict_block (insns, to, from, NULL_RTX,
901 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
902 return;
903 }
904
905 /* Truncating multi-word to a word or less. */
906 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
907 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
908 {
909 if (!((GET_CODE (from) == MEM
910 && ! MEM_VOLATILE_P (from)
911 && direct_load[(int) to_mode]
912 && ! mode_dependent_address_p (XEXP (from, 0)))
913 || GET_CODE (from) == REG
914 || GET_CODE (from) == SUBREG))
915 from = force_reg (from_mode, from);
916 convert_move (to, gen_lowpart (word_mode, from), 0);
917 return;
918 }
919
920 /* Handle pointer conversion */ /* SPEE 900220 */
921 if (to_mode == PQImode)
922 {
923 if (from_mode != QImode)
924 from = convert_to_mode (QImode, from, unsignedp);
925
926 #ifdef HAVE_truncqipqi2
927 if (HAVE_truncqipqi2)
928 {
929 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
930 return;
931 }
932 #endif /* HAVE_truncqipqi2 */
933 abort ();
934 }
935
936 if (from_mode == PQImode)
937 {
938 if (to_mode != QImode)
939 {
940 from = convert_to_mode (QImode, from, unsignedp);
941 from_mode = QImode;
942 }
943 else
944 {
945 #ifdef HAVE_extendpqiqi2
946 if (HAVE_extendpqiqi2)
947 {
948 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
949 return;
950 }
951 #endif /* HAVE_extendpqiqi2 */
952 abort ();
953 }
954 }
955
956 if (to_mode == PSImode)
957 {
958 if (from_mode != SImode)
959 from = convert_to_mode (SImode, from, unsignedp);
960
961 #ifdef HAVE_truncsipsi2
962 if (HAVE_truncsipsi2)
963 {
964 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
965 return;
966 }
967 #endif /* HAVE_truncsipsi2 */
968 abort ();
969 }
970
971 if (from_mode == PSImode)
972 {
973 if (to_mode != SImode)
974 {
975 from = convert_to_mode (SImode, from, unsignedp);
976 from_mode = SImode;
977 }
978 else
979 {
980 #ifdef HAVE_extendpsisi2
981 if (HAVE_extendpsisi2)
982 {
983 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
984 return;
985 }
986 #endif /* HAVE_extendpsisi2 */
987 abort ();
988 }
989 }
990
991 if (to_mode == PDImode)
992 {
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
995
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
998 {
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1000 return;
1001 }
1002 #endif /* HAVE_truncdipdi2 */
1003 abort ();
1004 }
1005
1006 if (from_mode == PDImode)
1007 {
1008 if (to_mode != DImode)
1009 {
1010 from = convert_to_mode (DImode, from, unsignedp);
1011 from_mode = DImode;
1012 }
1013 else
1014 {
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1017 {
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1019 return;
1020 }
1021 #endif /* HAVE_extendpdidi2 */
1022 abort ();
1023 }
1024 }
1025
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1028
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1033 {
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1045 return;
1046 }
1047
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1050 {
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1054 {
1055 emit_unop_insn (code, to, from, equiv_code);
1056 return;
1057 }
1058 else
1059 {
1060 enum machine_mode intermediate;
1061 rtx tmp;
1062 tree shift_amount;
1063
1064 /* Search for a mode to convert via. */
1065 for (intermediate = from_mode; intermediate != VOIDmode;
1066 intermediate = GET_MODE_WIDER_MODE (intermediate))
1067 if (((can_extend_p (to_mode, intermediate, unsignedp)
1068 != CODE_FOR_nothing)
1069 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1070 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1071 GET_MODE_BITSIZE (intermediate))))
1072 && (can_extend_p (intermediate, from_mode, unsignedp)
1073 != CODE_FOR_nothing))
1074 {
1075 convert_move (to, convert_to_mode (intermediate, from,
1076 unsignedp), unsignedp);
1077 return;
1078 }
1079
1080 /* No suitable intermediate mode.
1081 Generate what we need with shifts. */
1082 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1083 - GET_MODE_BITSIZE (from_mode), 0);
1084 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1085 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1086 to, unsignedp);
1087 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1088 to, unsignedp);
1089 if (tmp != to)
1090 emit_move_insn (to, tmp);
1091 return;
1092 }
1093 }
1094
1095 /* Support special truncate insns for certain modes. */
1096
1097 if (from_mode == DImode && to_mode == SImode)
1098 {
1099 #ifdef HAVE_truncdisi2
1100 if (HAVE_truncdisi2)
1101 {
1102 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1103 return;
1104 }
1105 #endif
1106 convert_move (to, force_reg (from_mode, from), unsignedp);
1107 return;
1108 }
1109
1110 if (from_mode == DImode && to_mode == HImode)
1111 {
1112 #ifdef HAVE_truncdihi2
1113 if (HAVE_truncdihi2)
1114 {
1115 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1116 return;
1117 }
1118 #endif
1119 convert_move (to, force_reg (from_mode, from), unsignedp);
1120 return;
1121 }
1122
1123 if (from_mode == DImode && to_mode == QImode)
1124 {
1125 #ifdef HAVE_truncdiqi2
1126 if (HAVE_truncdiqi2)
1127 {
1128 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1129 return;
1130 }
1131 #endif
1132 convert_move (to, force_reg (from_mode, from), unsignedp);
1133 return;
1134 }
1135
1136 if (from_mode == SImode && to_mode == HImode)
1137 {
1138 #ifdef HAVE_truncsihi2
1139 if (HAVE_truncsihi2)
1140 {
1141 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1142 return;
1143 }
1144 #endif
1145 convert_move (to, force_reg (from_mode, from), unsignedp);
1146 return;
1147 }
1148
1149 if (from_mode == SImode && to_mode == QImode)
1150 {
1151 #ifdef HAVE_truncsiqi2
1152 if (HAVE_truncsiqi2)
1153 {
1154 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1155 return;
1156 }
1157 #endif
1158 convert_move (to, force_reg (from_mode, from), unsignedp);
1159 return;
1160 }
1161
1162 if (from_mode == HImode && to_mode == QImode)
1163 {
1164 #ifdef HAVE_trunchiqi2
1165 if (HAVE_trunchiqi2)
1166 {
1167 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1168 return;
1169 }
1170 #endif
1171 convert_move (to, force_reg (from_mode, from), unsignedp);
1172 return;
1173 }
1174
1175 if (from_mode == TImode && to_mode == DImode)
1176 {
1177 #ifdef HAVE_trunctidi2
1178 if (HAVE_trunctidi2)
1179 {
1180 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1181 return;
1182 }
1183 #endif
1184 convert_move (to, force_reg (from_mode, from), unsignedp);
1185 return;
1186 }
1187
1188 if (from_mode == TImode && to_mode == SImode)
1189 {
1190 #ifdef HAVE_trunctisi2
1191 if (HAVE_trunctisi2)
1192 {
1193 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1194 return;
1195 }
1196 #endif
1197 convert_move (to, force_reg (from_mode, from), unsignedp);
1198 return;
1199 }
1200
1201 if (from_mode == TImode && to_mode == HImode)
1202 {
1203 #ifdef HAVE_trunctihi2
1204 if (HAVE_trunctihi2)
1205 {
1206 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1207 return;
1208 }
1209 #endif
1210 convert_move (to, force_reg (from_mode, from), unsignedp);
1211 return;
1212 }
1213
1214 if (from_mode == TImode && to_mode == QImode)
1215 {
1216 #ifdef HAVE_trunctiqi2
1217 if (HAVE_trunctiqi2)
1218 {
1219 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1220 return;
1221 }
1222 #endif
1223 convert_move (to, force_reg (from_mode, from), unsignedp);
1224 return;
1225 }
1226
1227 /* Handle truncation of volatile memrefs, and so on;
1228 the things that couldn't be truncated directly,
1229 and for which there was no special instruction. */
1230 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1231 {
1232 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1233 emit_move_insn (to, temp);
1234 return;
1235 }
1236
1237 /* Mode combination is not recognized. */
1238 abort ();
1239 }
1240
1241 /* Return an rtx for a value that would result
1242 from converting X to mode MODE.
1243 Both X and MODE may be floating, or both integer.
1244 UNSIGNEDP is nonzero if X is an unsigned value.
1245 This can be done by referring to a part of X in place
1246 or by copying to a new temporary with conversion.
1247
1248 This function *must not* call protect_from_queue
1249 except when putting X into an insn (in which case convert_move does it). */
1250
1251 rtx
1252 convert_to_mode (mode, x, unsignedp)
1253 enum machine_mode mode;
1254 rtx x;
1255 int unsignedp;
1256 {
1257 return convert_modes (mode, VOIDmode, x, unsignedp);
1258 }
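
/* A short sketch of the difference between the two entry points (the names
   "reg", "wide" and "wide2" are only illustrative): convert_to_mode reads the
   old mode from X itself, which is fine for registers and memory,

	rtx wide = convert_to_mode (SImode, reg, 1);

   but a CONST_INT carries VOIDmode, so when the original width of a constant
   matters the caller should use convert_modes and name the old mode
   explicitly:

	rtx wide2 = convert_modes (SImode, HImode, GEN_INT (-1), 1);

   which zero-extends the 16-bit value and yields (const_int 65535), whereas
   convert_to_mode would have had no width to extend from.  */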
1259
1260 /* Return an rtx for a value that would result
1261 from converting X from mode OLDMODE to mode MODE.
1262 Both modes may be floating, or both integer.
1263 UNSIGNEDP is nonzero if X is an unsigned value.
1264
1265 This can be done by referring to a part of X in place
1266 or by copying to a new temporary with conversion.
1267
1268 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1269
1270 This function *must not* call protect_from_queue
1271 except when putting X into an insn (in which case convert_move does it). */
1272
1273 rtx
1274 convert_modes (mode, oldmode, x, unsignedp)
1275 enum machine_mode mode, oldmode;
1276 rtx x;
1277 int unsignedp;
1278 {
1279 register rtx temp;
1280
1281 /* If FROM is a SUBREG that indicates that we have already done at least
1282 the required extension, strip it. */
1283
1284 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1285 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1286 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1287 x = gen_lowpart (mode, x);
1288
1289 if (GET_MODE (x) != VOIDmode)
1290 oldmode = GET_MODE (x);
1291
1292 if (mode == oldmode)
1293 return x;
1294
1295 /* There is one case that we must handle specially: If we are converting
1296 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1297 we are to interpret the constant as unsigned, gen_lowpart will do
1298 the wrong if the constant appears negative. What we want to do is
1299 make the high-order word of the constant zero, not all ones. */
1300
1301 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1302 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1303 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1304 {
1305 HOST_WIDE_INT val = INTVAL (x);
1306
1307 if (oldmode != VOIDmode
1308 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1309 {
1310 int width = GET_MODE_BITSIZE (oldmode);
1311
1312 /* We need to zero extend VAL. */
1313 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1314 }
1315
1316 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1317 }
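
/* Concretely (a sketch assuming a host where HOST_BITS_PER_WIDE_INT is 32
   and DImode is twice that wide): converting (const_int -1) to DImode as an
   unsigned value goes through the case above and yields
   immed_double_const (-1, 0, DImode), i.e. a low word of all ones and a
   high word of zero, whereas gen_lowpart would have sign-extended and set
   the high word to all ones as well.  */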
1318
1319 /* We can do this with a gen_lowpart if both desired and current modes
1320 are integer, and this is either a constant integer, a register, or a
1321 non-volatile MEM. Except for the constant case where MODE is no
1322 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1323
1324 if ((GET_CODE (x) == CONST_INT
1325 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1326 || (GET_MODE_CLASS (mode) == MODE_INT
1327 && GET_MODE_CLASS (oldmode) == MODE_INT
1328 && (GET_CODE (x) == CONST_DOUBLE
1329 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1330 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1331 && direct_load[(int) mode])
1332 || (GET_CODE (x) == REG
1333 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1334 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1335 {
1336 /* ?? If we don't know OLDMODE, we have to assume here that
1337 X does not need sign- or zero-extension. This may not be
1338 the case, but it's the best we can do. */
1339 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1340 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1341 {
1342 HOST_WIDE_INT val = INTVAL (x);
1343 int width = GET_MODE_BITSIZE (oldmode);
1344
1345 /* We must sign or zero-extend in this case. Start by
1346 zero-extending, then sign extend if we need to. */
1347 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1348 if (! unsignedp
1349 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1350 val |= (HOST_WIDE_INT) (-1) << width;
1351
1352 return GEN_INT (val);
1353 }
1354
1355 return gen_lowpart (mode, x);
1356 }
1357
1358 temp = gen_reg_rtx (mode);
1359 convert_move (temp, x, unsignedp);
1360 return temp;
1361 }
1362 \f
1363
1364 /* This macro is used to determine what the largest unit size that
1365 move_by_pieces can use is. */
1366
1367 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1368 move efficiently, as opposed to MOVE_MAX which is the maximum
1369 number of bytes we can move with a single instruction. */
1370
1371 #ifndef MOVE_MAX_PIECES
1372 #define MOVE_MAX_PIECES MOVE_MAX
1373 #endif
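
/* As a sketch of the decomposition this bounds (assuming a hypothetical
   64-bit target where MOVE_MAX_PIECES is 8 and the block is fully aligned):
   a 13-byte copy is done as one DImode move, one SImode move and one QImode
   move -- the loop in move_by_pieces below always starts with the widest
   integer mode no wider than MOVE_MAX_PIECES bytes and works downward.  */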
1374
1375 /* Generate several move instructions to copy LEN bytes
1376 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1377 The caller must pass FROM and TO
1378 through protect_from_queue before calling.
1379 ALIGN is maximum alignment we can assume. */
1380
1381 void
1382 move_by_pieces (to, from, len, align)
1383 rtx to, from;
1384 int len;
1385 unsigned int align;
1386 {
1387 struct move_by_pieces data;
1388 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1389 unsigned int max_size = MOVE_MAX_PIECES + 1;
1390 enum machine_mode mode = VOIDmode, tmode;
1391 enum insn_code icode;
1392
1393 data.offset = 0;
1394 data.to_addr = to_addr;
1395 data.from_addr = from_addr;
1396 data.to = to;
1397 data.from = from;
1398 data.autinc_to
1399 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1400 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1401 data.autinc_from
1402 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1403 || GET_CODE (from_addr) == POST_INC
1404 || GET_CODE (from_addr) == POST_DEC);
1405
1406 data.explicit_inc_from = 0;
1407 data.explicit_inc_to = 0;
1408 data.reverse
1409 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1410 if (data.reverse) data.offset = len;
1411 data.len = len;
1412
1413 data.to_struct = MEM_IN_STRUCT_P (to);
1414 data.from_struct = MEM_IN_STRUCT_P (from);
1415 data.to_readonly = RTX_UNCHANGING_P (to);
1416 data.from_readonly = RTX_UNCHANGING_P (from);
1417
1418 /* If copying requires more than two move insns,
1419 copy addresses to registers (to make displacements shorter)
1420 and use post-increment if available. */
1421 if (!(data.autinc_from && data.autinc_to)
1422 && move_by_pieces_ninsns (len, align) > 2)
1423 {
1424 /* Find the mode of the largest move... */
1425 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1426 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1427 if (GET_MODE_SIZE (tmode) < max_size)
1428 mode = tmode;
1429
1430 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1431 {
1432 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1433 data.autinc_from = 1;
1434 data.explicit_inc_from = -1;
1435 }
1436 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1437 {
1438 data.from_addr = copy_addr_to_reg (from_addr);
1439 data.autinc_from = 1;
1440 data.explicit_inc_from = 1;
1441 }
1442 if (!data.autinc_from && CONSTANT_P (from_addr))
1443 data.from_addr = copy_addr_to_reg (from_addr);
1444 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1445 {
1446 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1447 data.autinc_to = 1;
1448 data.explicit_inc_to = -1;
1449 }
1450 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1451 {
1452 data.to_addr = copy_addr_to_reg (to_addr);
1453 data.autinc_to = 1;
1454 data.explicit_inc_to = 1;
1455 }
1456 if (!data.autinc_to && CONSTANT_P (to_addr))
1457 data.to_addr = copy_addr_to_reg (to_addr);
1458 }
1459
1460 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1461 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1462 align = MOVE_MAX * BITS_PER_UNIT;
1463
1464 /* First move what we can in the largest integer mode, then go to
1465 successively smaller modes. */
1466
1467 while (max_size > 1)
1468 {
1469 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1470 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1471 if (GET_MODE_SIZE (tmode) < max_size)
1472 mode = tmode;
1473
1474 if (mode == VOIDmode)
1475 break;
1476
1477 icode = mov_optab->handlers[(int) mode].insn_code;
1478 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1479 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1480
1481 max_size = GET_MODE_SIZE (mode);
1482 }
1483
1484 /* The code above should have handled everything. */
1485 if (data.len > 0)
1486 abort ();
1487 }
1488
1489 /* Return number of insns required to move L bytes by pieces.
1490 ALIGN (in bytes) is maximum alignment we can assume. */
1491
1492 static int
1493 move_by_pieces_ninsns (l, align)
1494 unsigned int l;
1495 unsigned int align;
1496 {
1497 register int n_insns = 0;
1498 unsigned int max_size = MOVE_MAX + 1;
1499
1500 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1501 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1502 align = MOVE_MAX * BITS_PER_UNIT;
1503
1504 while (max_size > 1)
1505 {
1506 enum machine_mode mode = VOIDmode, tmode;
1507 enum insn_code icode;
1508
1509 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1510 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1511 if (GET_MODE_SIZE (tmode) < max_size)
1512 mode = tmode;
1513
1514 if (mode == VOIDmode)
1515 break;
1516
1517 icode = mov_optab->handlers[(int) mode].insn_code;
1518 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1519 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1520
1521 max_size = GET_MODE_SIZE (mode);
1522 }
1523
1524 return n_insns;
1525 }
1526
1527 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1528 with move instructions for mode MODE. GENFUN is the gen_... function
1529 to make a move insn for that mode. DATA has all the other info. */
1530
1531 static void
1532 move_by_pieces_1 (genfun, mode, data)
1533 rtx (*genfun) PARAMS ((rtx, ...));
1534 enum machine_mode mode;
1535 struct move_by_pieces *data;
1536 {
1537 register int size = GET_MODE_SIZE (mode);
1538 register rtx to1, from1;
1539
1540 while (data->len >= size)
1541 {
1542 if (data->reverse) data->offset -= size;
1543
1544 to1 = (data->autinc_to
1545 ? gen_rtx_MEM (mode, data->to_addr)
1546 : copy_rtx (change_address (data->to, mode,
1547 plus_constant (data->to_addr,
1548 data->offset))));
1549 MEM_IN_STRUCT_P (to1) = data->to_struct;
1550 RTX_UNCHANGING_P (to1) = data->to_readonly;
1551
1552 from1
1553 = (data->autinc_from
1554 ? gen_rtx_MEM (mode, data->from_addr)
1555 : copy_rtx (change_address (data->from, mode,
1556 plus_constant (data->from_addr,
1557 data->offset))));
1558 MEM_IN_STRUCT_P (from1) = data->from_struct;
1559 RTX_UNCHANGING_P (from1) = data->from_readonly;
1560
1561 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1562 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1563 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1564 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1565
1566 emit_insn ((*genfun) (to1, from1));
1567 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1568 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1569 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1570 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1571
1572 if (! data->reverse) data->offset += size;
1573
1574 data->len -= size;
1575 }
1576 }
1577 \f
1578 /* Emit code to move a block Y to a block X.
1579 This may be done with string-move instructions,
1580 with multiple scalar move instructions, or with a library call.
1581
1582 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1583 with mode BLKmode.
1584 SIZE is an rtx that says how long they are.
1585 ALIGN is the maximum alignment we can assume they have.
1586
1587 Return the address of the new block, if memcpy is called and returns it,
1588 0 otherwise. */
1589
1590 rtx
1591 emit_block_move (x, y, size, align)
1592 rtx x, y;
1593 rtx size;
1594 unsigned int align;
1595 {
1596 rtx retval = 0;
1597 #ifdef TARGET_MEM_FUNCTIONS
1598 static tree fn;
1599 tree call_expr, arg_list;
1600 #endif
1601
1602 if (GET_MODE (x) != BLKmode)
1603 abort ();
1604
1605 if (GET_MODE (y) != BLKmode)
1606 abort ();
1607
1608 x = protect_from_queue (x, 1);
1609 y = protect_from_queue (y, 0);
1610 size = protect_from_queue (size, 0);
1611
1612 if (GET_CODE (x) != MEM)
1613 abort ();
1614 if (GET_CODE (y) != MEM)
1615 abort ();
1616 if (size == 0)
1617 abort ();
1618
1619 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1620 move_by_pieces (x, y, INTVAL (size), align);
1621 else
1622 {
1623 /* Try the most limited insn first, because there's no point
1624 including more than one in the machine description unless
1625 the more limited one has some advantage. */
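
	  /* Each movstrM pattern is expected to take four operands: the
	     destination block, the source block, the length (in mode M) and
	     the alignment in bytes -- hence the predicate checks on operands
	     0, 1 and 3 and the conversion of SIZE into operand 2 below.  */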
1626
1627 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1628 enum machine_mode mode;
1629
1630 /* Since this is a move insn, we don't care about volatility. */
1631 volatile_ok = 1;
1632
1633 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1634 mode = GET_MODE_WIDER_MODE (mode))
1635 {
1636 enum insn_code code = movstr_optab[(int) mode];
1637 insn_operand_predicate_fn pred;
1638
1639 if (code != CODE_FOR_nothing
1640 	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1641 here because if SIZE is less than the mode mask, as it is
1642 returned by the macro, it will definitely be less than the
1643 actual mode mask. */
1644 && ((GET_CODE (size) == CONST_INT
1645 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1646 <= (GET_MODE_MASK (mode) >> 1)))
1647 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1648 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1649 || (*pred) (x, BLKmode))
1650 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1651 || (*pred) (y, BLKmode))
1652 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1653 || (*pred) (opalign, VOIDmode)))
1654 {
1655 rtx op2;
1656 rtx last = get_last_insn ();
1657 rtx pat;
1658
1659 op2 = convert_to_mode (mode, size, 1);
1660 pred = insn_data[(int) code].operand[2].predicate;
1661 if (pred != 0 && ! (*pred) (op2, mode))
1662 op2 = copy_to_mode_reg (mode, op2);
1663
1664 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1665 if (pat)
1666 {
1667 emit_insn (pat);
1668 volatile_ok = 0;
1669 return 0;
1670 }
1671 else
1672 delete_insns_since (last);
1673 }
1674 }
1675
1676 volatile_ok = 0;
1677
1678 /* X, Y, or SIZE may have been passed through protect_from_queue.
1679
1680 It is unsafe to save the value generated by protect_from_queue
1681 and reuse it later. Consider what happens if emit_queue is
1682 called before the return value from protect_from_queue is used.
1683
1684 Expansion of the CALL_EXPR below will call emit_queue before
1685 we are finished emitting RTL for argument setup. So if we are
1686 not careful we could get the wrong value for an argument.
1687
1688 To avoid this problem we go ahead and emit code to copy X, Y &
1689 SIZE into new pseudos. We can then place those new pseudos
1690 into an RTL_EXPR and use them later, even after a call to
1691 emit_queue.
1692
1693 Note this is not strictly needed for library calls since they
1694 do not call emit_queue before loading their arguments. However,
1695 we may need to have library calls call emit_queue in the future
1696 since failing to do so could cause problems for targets which
1697 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1698 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1699 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1700
1701 #ifdef TARGET_MEM_FUNCTIONS
1702 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1703 #else
1704 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1705 TREE_UNSIGNED (integer_type_node));
1706 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1707 #endif
1708
1709 #ifdef TARGET_MEM_FUNCTIONS
1710 /* It is incorrect to use the libcall calling conventions to call
1711 memcpy in this context.
1712
1713 This could be a user call to memcpy and the user may wish to
1714 examine the return value from memcpy.
1715
1716 For targets where libcalls and normal calls have different conventions
1717 for returning pointers, we could end up generating incorrect code.
1718
1719 So instead of using a libcall sequence we build up a suitable
1720 CALL_EXPR and expand the call in the normal fashion. */
1721 if (fn == NULL_TREE)
1722 {
1723 tree fntype;
1724
1725 /* This was copied from except.c, I don't know if all this is
1726 necessary in this context or not. */
1727 fn = get_identifier ("memcpy");
1728 push_obstacks_nochange ();
1729 end_temporary_allocation ();
1730 fntype = build_pointer_type (void_type_node);
1731 fntype = build_function_type (fntype, NULL_TREE);
1732 fn = build_decl (FUNCTION_DECL, fn, fntype);
1733 ggc_add_tree_root (&fn, 1);
1734 DECL_EXTERNAL (fn) = 1;
1735 TREE_PUBLIC (fn) = 1;
1736 DECL_ARTIFICIAL (fn) = 1;
1737 make_decl_rtl (fn, NULL_PTR, 1);
1738 assemble_external (fn);
1739 pop_obstacks ();
1740 }
1741
1742 /* We need to make an argument list for the function call.
1743
1744 memcpy has three arguments, the first two are void * addresses and
1745 the last is a size_t byte count for the copy. */
1746 arg_list
1747 = build_tree_list (NULL_TREE,
1748 make_tree (build_pointer_type (void_type_node), x));
1749 TREE_CHAIN (arg_list)
1750 = build_tree_list (NULL_TREE,
1751 make_tree (build_pointer_type (void_type_node), y));
1752 TREE_CHAIN (TREE_CHAIN (arg_list))
1753 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1754 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1755
1756 /* Now we have to build up the CALL_EXPR itself. */
1757 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1758 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1759 call_expr, arg_list, NULL_TREE);
1760 TREE_SIDE_EFFECTS (call_expr) = 1;
1761
1762 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1763 #else
1764 emit_library_call (bcopy_libfunc, 0,
1765 VOIDmode, 3, y, Pmode, x, Pmode,
1766 convert_to_mode (TYPE_MODE (integer_type_node), size,
1767 TREE_UNSIGNED (integer_type_node)),
1768 TYPE_MODE (integer_type_node));
1769 #endif
1770 }
1771
1772 return retval;
1773 }
1774 \f
1775 /* Copy all or part of a value X into registers starting at REGNO.
1776 The number of registers to be filled is NREGS. */
1777
1778 void
1779 move_block_to_reg (regno, x, nregs, mode)
1780 int regno;
1781 rtx x;
1782 int nregs;
1783 enum machine_mode mode;
1784 {
1785 int i;
1786 #ifdef HAVE_load_multiple
1787 rtx pat;
1788 rtx last;
1789 #endif
1790
1791 if (nregs == 0)
1792 return;
1793
1794 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1795 x = validize_mem (force_const_mem (mode, x));
1796
1797 /* See if the machine can do this with a load multiple insn. */
1798 #ifdef HAVE_load_multiple
1799 if (HAVE_load_multiple)
1800 {
1801 last = get_last_insn ();
1802 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1803 GEN_INT (nregs));
1804 if (pat)
1805 {
1806 emit_insn (pat);
1807 return;
1808 }
1809 else
1810 delete_insns_since (last);
1811 }
1812 #endif
1813
1814 for (i = 0; i < nregs; i++)
1815 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1816 operand_subword_force (x, i, mode));
1817 }
1818
1819 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1820 The number of registers to be filled is NREGS. SIZE indicates the number
1821 of bytes in the object X. */
1822
1823
1824 void
1825 move_block_from_reg (regno, x, nregs, size)
1826 int regno;
1827 rtx x;
1828 int nregs;
1829 int size;
1830 {
1831 int i;
1832 #ifdef HAVE_store_multiple
1833 rtx pat;
1834 rtx last;
1835 #endif
1836 enum machine_mode mode;
1837
1838 /* If SIZE is that of a mode no bigger than a word, just use that
1839 mode's store operation. */
1840 if (size <= UNITS_PER_WORD
1841 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1842 {
1843 emit_move_insn (change_address (x, mode, NULL),
1844 gen_rtx_REG (mode, regno));
1845 return;
1846 }
1847
1848 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1849 to the left before storing to memory. Note that the previous test
1850 doesn't handle all cases (e.g. SIZE == 3). */
1851 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1852 {
1853 rtx tem = operand_subword (x, 0, 1, BLKmode);
1854 rtx shift;
1855
1856 if (tem == 0)
1857 abort ();
1858
1859 shift = expand_shift (LSHIFT_EXPR, word_mode,
1860 gen_rtx_REG (word_mode, regno),
1861 build_int_2 ((UNITS_PER_WORD - size)
1862 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1863 emit_move_insn (tem, shift);
1864 return;
1865 }
1866
1867 /* See if the machine can do this with a store multiple insn. */
1868 #ifdef HAVE_store_multiple
1869 if (HAVE_store_multiple)
1870 {
1871 last = get_last_insn ();
1872 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1873 GEN_INT (nregs));
1874 if (pat)
1875 {
1876 emit_insn (pat);
1877 return;
1878 }
1879 else
1880 delete_insns_since (last);
1881 }
1882 #endif
1883
1884 for (i = 0; i < nregs; i++)
1885 {
1886 rtx tem = operand_subword (x, i, 1, BLKmode);
1887
1888 if (tem == 0)
1889 abort ();
1890
1891 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1892 }
1893 }
1894
1895 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1896 registers represented by a PARALLEL. SSIZE represents the total size of
1897 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1898 SRC in bits. */
1899 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1900 the balance will be in what would be the low-order memory addresses, i.e.
1901 left justified for big endian, right justified for little endian. This
1902 happens to be true for the targets currently using this support. If this
1903 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1904 would be needed. */
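
/* For orientation, a sketch of such a PARALLEL (the register numbers and
   modes are made up): a 12-byte block passed in two registers on a
   hypothetical 64-bit target might arrive as

	(parallel [(expr_list (reg:DI 3) (const_int 0))
		   (expr_list (reg:SI 4) (const_int 8))])

   where the second operand of each EXPR_LIST is the byte offset of that
   register's piece within the block, and an initial element whose first
   operand is null marks a value that lives partly on the stack, as handled
   in the code below.  */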
1905
1906 void
1907 emit_group_load (dst, orig_src, ssize, align)
1908 rtx dst, orig_src;
1909      int ssize;
1910      unsigned int align;
1911 {
1912 rtx *tmps, src;
1913 int start, i;
1914
1915 if (GET_CODE (dst) != PARALLEL)
1916 abort ();
1917
1918 /* Check for a NULL entry, used to indicate that the parameter goes
1919 both on the stack and in registers. */
1920 if (XEXP (XVECEXP (dst, 0, 0), 0))
1921 start = 0;
1922 else
1923 start = 1;
1924
1925 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1926
1927 /* If we won't be loading directly from memory, protect the real source
1928 from strange tricks we might play. */
1929 src = orig_src;
1930 if (GET_CODE (src) != MEM)
1931 {
1932       if (GET_MODE (src) == VOIDmode)
1933 src = gen_reg_rtx (GET_MODE (dst));
1934 else
1935 src = gen_reg_rtx (GET_MODE (orig_src));
1936 emit_move_insn (src, orig_src);
1937 }
1938
1939 /* Process the pieces. */
1940 for (i = start; i < XVECLEN (dst, 0); i++)
1941 {
1942 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1943 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1944 unsigned int bytelen = GET_MODE_SIZE (mode);
1945 int shift = 0;
1946
1947 /* Handle trailing fragments that run over the size of the struct. */
1948 if (ssize >= 0 && bytepos + bytelen > ssize)
1949 {
1950 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1951 bytelen = ssize - bytepos;
1952 if (bytelen <= 0)
1953 abort ();
1954 }
1955
1956 /* Optimize the access just a bit. */
1957 if (GET_CODE (src) == MEM
1958 && align >= GET_MODE_ALIGNMENT (mode)
1959 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1960 && bytelen == GET_MODE_SIZE (mode))
1961 {
1962 tmps[i] = gen_reg_rtx (mode);
1963 emit_move_insn (tmps[i],
1964 change_address (src, mode,
1965 plus_constant (XEXP (src, 0),
1966 bytepos)));
1967 }
1968 else if (GET_CODE (src) == CONCAT)
1969 {
1970 if (bytepos == 0
1971 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1972 tmps[i] = XEXP (src, 0);
1973 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1974 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1975 tmps[i] = XEXP (src, 1);
1976 else
1977 abort ();
1978 }
1979 else
1980 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1981 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1982 mode, mode, align, ssize);
1983
1984 if (BYTES_BIG_ENDIAN && shift)
1985 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1986 tmps[i], 0, OPTAB_WIDEN);
1987 }
1988
1989 emit_queue();
1990
1991 /* Copy the extracted pieces into the proper (probable) hard regs. */
1992 for (i = start; i < XVECLEN (dst, 0); i++)
1993 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1994 }
1995
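/* Illustrative sketch (not part of GNU CC): the PARALLEL destination that
   emit_group_load expects is a vector of (EXPR_LIST reg byte-offset)
   entries.  The DImode registers 4 and 5, the 16-byte size and the 64-bit
   alignment are assumptions chosen only for this example.  */
#if 0
static void
example_group_load (src_mem)
     rtx src_mem;		/* A BLKmode MEM holding 16 bytes.  */
{
  rtx dst
    = gen_rtx_PARALLEL (VOIDmode,
			gen_rtvec (2,
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 4),
						      GEN_INT (0)),
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 5),
						      GEN_INT (8))));

  /* 16 is the total size of SRC_MEM in bytes, 64 its alignment in bits.  */
  emit_group_load (dst, src_mem, 16, 64);
}
#endif
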
1996 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1997 registers represented by a PARALLEL. SSIZE represents the total size of
1998    block DST in bytes, or -1 if not known.  ALIGN is the known alignment of
1998    DST in bits.  */
1999
2000 void
2001 emit_group_store (orig_dst, src, ssize, align)
2002 rtx orig_dst, src;
2003 int ssize;
2004 unsigned int align;
2005 {
2006 rtx *tmps, dst;
2007 int start, i;
2008
2009 if (GET_CODE (src) != PARALLEL)
2010 abort ();
2011
2012 /* Check for a NULL entry, used to indicate that the parameter goes
2013 both on the stack and in registers. */
2014 if (XEXP (XVECEXP (src, 0, 0), 0))
2015 start = 0;
2016 else
2017 start = 1;
2018
2019 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2020
2021 /* Copy the (probable) hard regs into pseudos. */
2022 for (i = start; i < XVECLEN (src, 0); i++)
2023 {
2024 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2025 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2026 emit_move_insn (tmps[i], reg);
2027 }
2028 emit_queue();
2029
2030 /* If we won't be storing directly into memory, protect the real destination
2031 from strange tricks we might play. */
2032 dst = orig_dst;
2033 if (GET_CODE (dst) == PARALLEL)
2034 {
2035 rtx temp;
2036
2037 /* We can get a PARALLEL dst if there is a conditional expression in
2038 a return statement. In that case, the dst and src are the same,
2039 so no action is necessary. */
2040 if (rtx_equal_p (dst, src))
2041 return;
2042
2043 /* It is unclear if we can ever reach here, but we may as well handle
2044 it. Allocate a temporary, and split this into a store/load to/from
2045 the temporary. */
2046
2047 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2048 emit_group_store (temp, src, ssize, align);
2049 emit_group_load (dst, temp, ssize, align);
2050 return;
2051 }
2052 else if (GET_CODE (dst) != MEM)
2053 {
2054 dst = gen_reg_rtx (GET_MODE (orig_dst));
2055 /* Make life a bit easier for combine. */
2056 emit_move_insn (dst, const0_rtx);
2057 }
2058 else if (! MEM_IN_STRUCT_P (dst))
2059 {
2060 /* store_bit_field requires that memory operations have
2061 mem_in_struct_p set; we might not. */
2062
2063 dst = copy_rtx (orig_dst);
2064 MEM_SET_IN_STRUCT_P (dst, 1);
2065 }
2066
2067 /* Process the pieces. */
2068 for (i = start; i < XVECLEN (src, 0); i++)
2069 {
2070 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2071 enum machine_mode mode = GET_MODE (tmps[i]);
2072 unsigned int bytelen = GET_MODE_SIZE (mode);
2073
2074 /* Handle trailing fragments that run over the size of the struct. */
2075 if (ssize >= 0 && bytepos + bytelen > ssize)
2076 {
2077 if (BYTES_BIG_ENDIAN)
2078 {
2079 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2080 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2081 tmps[i], 0, OPTAB_WIDEN);
2082 }
2083 bytelen = ssize - bytepos;
2084 }
2085
2086 /* Optimize the access just a bit. */
2087 if (GET_CODE (dst) == MEM
2088 && align >= GET_MODE_ALIGNMENT (mode)
2089 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2090 && bytelen == GET_MODE_SIZE (mode))
2091 emit_move_insn (change_address (dst, mode,
2092 plus_constant (XEXP (dst, 0),
2093 bytepos)),
2094 tmps[i]);
2095 else
2096 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2097 mode, tmps[i], align, ssize);
2098 }
2099
2100 emit_queue();
2101
2102 /* Copy from the pseudo into the (probable) hard reg. */
2103 if (GET_CODE (dst) == REG)
2104 emit_move_insn (orig_dst, dst);
2105 }
2106
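/* Illustrative sketch (not part of GNU CC): the inverse of the example
   above.  SRC_PARALLEL has the same (EXPR_LIST reg byte-offset) shape and
   DST_MEM is a 16-byte BLKmode MEM; the size and alignment are assumptions
   chosen only for this example.  */
#if 0
static void
example_group_store (dst_mem, src_parallel)
     rtx dst_mem, src_parallel;
{
  emit_group_store (dst_mem, src_parallel, 16, 64);
}
#endif
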
2107 /* Generate code to copy a BLKmode object of TYPE out of a
2108 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2109 is null, a stack temporary is created. TGTBLK is returned.
2110
2111 The primary purpose of this routine is to handle functions
2112 that return BLKmode structures in registers. Some machines
2113 (the PA for example) want to return all small structures
2114 in registers regardless of the structure's alignment. */
2115
2116 rtx
2117 copy_blkmode_from_reg (tgtblk, srcreg, type)
2118 rtx tgtblk;
2119 rtx srcreg;
2120 tree type;
2121 {
2122 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2123 rtx src = NULL, dst = NULL;
2124 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2125 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2126
2127 if (tgtblk == 0)
2128 {
2129 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2130 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2131 preserve_temp_slots (tgtblk);
2132 }
2133
2134 /* This code assumes srcreg is at least a full word. If it isn't,
2135 copy it into a new pseudo which is a full word. */
2136 if (GET_MODE (srcreg) != BLKmode
2137 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2138 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2139
2140 /* Structures whose size is not a multiple of a word are aligned
2141 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2142 machine, this means we must skip the empty high order bytes when
2143 calculating the bit offset. */
2144 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2145 big_endian_correction
2146 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2147
2148   /* Copy the structure BITSIZE bits at a time.
2149
2150 We could probably emit more efficient code for machines which do not use
2151 strict alignment, but it doesn't seem worth the effort at the current
2152 time. */
2153 for (bitpos = 0, xbitpos = big_endian_correction;
2154 bitpos < bytes * BITS_PER_UNIT;
2155 bitpos += bitsize, xbitpos += bitsize)
2156 {
2157 /* We need a new source operand each time xbitpos is on a
2158          word boundary or when xbitpos == big_endian_correction
2159 (the first time through). */
2160 if (xbitpos % BITS_PER_WORD == 0
2161 || xbitpos == big_endian_correction)
2162 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2163
2164 /* We need a new destination operand each time bitpos is on
2165 a word boundary. */
2166 if (bitpos % BITS_PER_WORD == 0)
2167 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2168
2169 /* Use xbitpos for the source extraction (right justified) and
2170          bitpos for the destination store (left justified).  */
2171 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2172 extract_bit_field (src, bitsize,
2173 xbitpos % BITS_PER_WORD, 1,
2174 NULL_RTX, word_mode, word_mode,
2175 bitsize, BITS_PER_WORD),
2176 bitsize, BITS_PER_WORD);
2177 }
2178
2179 return tgtblk;
2180 }
2181
2182
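/* Illustrative sketch (not part of GNU CC): unpacking a small structure
   that a callee returned in a register.  The word_mode return register
   and its number are assumptions chosen only for this example.  */
#if 0
static rtx
example_copy_blkmode (type)
     tree type;			/* The BLKmode structure type.  */
{
  rtx ret_reg = gen_rtx_REG (word_mode, 28);

  /* Passing a null target makes copy_blkmode_from_reg allocate and
     return a stack temporary.  */
  return copy_blkmode_from_reg (NULL_RTX, ret_reg, type);
}
#endif
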
2183 /* Add a USE expression for REG to the (possibly empty) list pointed
2184 to by CALL_FUSAGE. REG must denote a hard register. */
2185
2186 void
2187 use_reg (call_fusage, reg)
2188 rtx *call_fusage, reg;
2189 {
2190 if (GET_CODE (reg) != REG
2191 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2192 abort();
2193
2194 *call_fusage
2195 = gen_rtx_EXPR_LIST (VOIDmode,
2196 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2197 }
2198
2199 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2200 starting at REGNO. All of these registers must be hard registers. */
2201
2202 void
2203 use_regs (call_fusage, regno, nregs)
2204 rtx *call_fusage;
2205 int regno;
2206 int nregs;
2207 {
2208 int i;
2209
2210 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2211 abort ();
2212
2213 for (i = 0; i < nregs; i++)
2214 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2215 }
2216
2217 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2218 PARALLEL REGS. This is for calls that pass values in multiple
2219 non-contiguous locations. The Irix 6 ABI has examples of this. */
2220
2221 void
2222 use_group_regs (call_fusage, regs)
2223 rtx *call_fusage;
2224 rtx regs;
2225 {
2226 int i;
2227
2228 for (i = 0; i < XVECLEN (regs, 0); i++)
2229 {
2230 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2231
2232 /* A NULL entry means the parameter goes both on the stack and in
2233 registers. This can also be a MEM for targets that pass values
2234 partially on the stack and partially in registers. */
2235 if (reg != 0 && GET_CODE (reg) == REG)
2236 use_reg (call_fusage, reg);
2237 }
2238 }
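
/* Illustrative sketch (not part of GNU CC): building the
   CALL_INSN_FUNCTION_USAGE chain for a call whose arguments live in hard
   registers 4 and 5.  The register numbers are assumptions chosen only
   for this example.  */
#if 0
static rtx
example_call_fusage ()
{
  rtx call_fusage = 0;

  /* Adds (USE (reg 4)) and (USE (reg 5)) to the chain.  */
  use_regs (&call_fusage, 4, 2);
  return call_fusage;
}
#endif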
2239 \f
2240 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2241 rtx with BLKmode). The caller must pass TO through protect_from_queue
2242 before calling. ALIGN is maximum alignment we can assume. */
2243
2244 static void
2245 clear_by_pieces (to, len, align)
2246 rtx to;
2247 int len;
2248 unsigned int align;
2249 {
2250 struct clear_by_pieces data;
2251 rtx to_addr = XEXP (to, 0);
2252 unsigned int max_size = MOVE_MAX_PIECES + 1;
2253 enum machine_mode mode = VOIDmode, tmode;
2254 enum insn_code icode;
2255
2256 data.offset = 0;
2257 data.to_addr = to_addr;
2258 data.to = to;
2259 data.autinc_to
2260 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2261 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2262
2263 data.explicit_inc_to = 0;
2264 data.reverse
2265 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2266 if (data.reverse) data.offset = len;
2267 data.len = len;
2268
2269 data.to_struct = MEM_IN_STRUCT_P (to);
2270
2271 /* If copying requires more than two move insns,
2272 copy addresses to registers (to make displacements shorter)
2273 and use post-increment if available. */
2274 if (!data.autinc_to
2275 && move_by_pieces_ninsns (len, align) > 2)
2276 {
2277       /* Determine the main mode we'll be using.  */
2278 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2279 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2280 if (GET_MODE_SIZE (tmode) < max_size)
2281 mode = tmode;
2282
2283 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2284 {
2285 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2286 data.autinc_to = 1;
2287 data.explicit_inc_to = -1;
2288 }
2289 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2290 {
2291 data.to_addr = copy_addr_to_reg (to_addr);
2292 data.autinc_to = 1;
2293 data.explicit_inc_to = 1;
2294 }
2295 if (!data.autinc_to && CONSTANT_P (to_addr))
2296 data.to_addr = copy_addr_to_reg (to_addr);
2297 }
2298
2299 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2300 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2301 align = MOVE_MAX * BITS_PER_UNIT;
2302
2303 /* First move what we can in the largest integer mode, then go to
2304 successively smaller modes. */
2305
2306 while (max_size > 1)
2307 {
2308 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2309 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2310 if (GET_MODE_SIZE (tmode) < max_size)
2311 mode = tmode;
2312
2313 if (mode == VOIDmode)
2314 break;
2315
2316 icode = mov_optab->handlers[(int) mode].insn_code;
2317 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2318 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2319
2320 max_size = GET_MODE_SIZE (mode);
2321 }
2322
2323 /* The code above should have handled everything. */
2324 if (data.len != 0)
2325 abort ();
2326 }
2327
2328 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2329 with move instructions for mode MODE. GENFUN is the gen_... function
2330 to make a move insn for that mode. DATA has all the other info. */
2331
2332 static void
2333 clear_by_pieces_1 (genfun, mode, data)
2334 rtx (*genfun) PARAMS ((rtx, ...));
2335 enum machine_mode mode;
2336 struct clear_by_pieces *data;
2337 {
2338 register int size = GET_MODE_SIZE (mode);
2339 register rtx to1;
2340
2341 while (data->len >= size)
2342 {
2343 if (data->reverse) data->offset -= size;
2344
2345 to1 = (data->autinc_to
2346 ? gen_rtx_MEM (mode, data->to_addr)
2347 : copy_rtx (change_address (data->to, mode,
2348 plus_constant (data->to_addr,
2349 data->offset))));
2350 MEM_IN_STRUCT_P (to1) = data->to_struct;
2351
2352 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2353 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2354
2355 emit_insn ((*genfun) (to1, const0_rtx));
2356 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2357 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2358
2359 if (! data->reverse) data->offset += size;
2360
2361 data->len -= size;
2362 }
2363 }
2364 \f
2365 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2366    its length in bytes and ALIGN is the maximum alignment we can assume it has.
2367
2368 If we call a function that returns the length of the block, return it. */
2369
2370 rtx
2371 clear_storage (object, size, align)
2372 rtx object;
2373 rtx size;
2374 unsigned int align;
2375 {
2376 #ifdef TARGET_MEM_FUNCTIONS
2377 static tree fn;
2378 tree call_expr, arg_list;
2379 #endif
2380 rtx retval = 0;
2381
2382 if (GET_MODE (object) == BLKmode)
2383 {
2384 object = protect_from_queue (object, 1);
2385 size = protect_from_queue (size, 0);
2386
2387 if (GET_CODE (size) == CONST_INT
2388 && MOVE_BY_PIECES_P (INTVAL (size), align))
2389 clear_by_pieces (object, INTVAL (size), align);
2390 else
2391 {
2392 /* Try the most limited insn first, because there's no point
2393 including more than one in the machine description unless
2394 the more limited one has some advantage. */
2395
2396 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2397 enum machine_mode mode;
2398
2399 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2400 mode = GET_MODE_WIDER_MODE (mode))
2401 {
2402 enum insn_code code = clrstr_optab[(int) mode];
2403 insn_operand_predicate_fn pred;
2404
2405 if (code != CODE_FOR_nothing
2406 /* We don't need MODE to be narrower than
2407 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2408 the mode mask, as it is returned by the macro, it will
2409 definitely be less than the actual mode mask. */
2410 && ((GET_CODE (size) == CONST_INT
2411 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2412 <= (GET_MODE_MASK (mode) >> 1)))
2413 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2414 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2415 || (*pred) (object, BLKmode))
2416 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2417 || (*pred) (opalign, VOIDmode)))
2418 {
2419 rtx op1;
2420 rtx last = get_last_insn ();
2421 rtx pat;
2422
2423 op1 = convert_to_mode (mode, size, 1);
2424 pred = insn_data[(int) code].operand[1].predicate;
2425 if (pred != 0 && ! (*pred) (op1, mode))
2426 op1 = copy_to_mode_reg (mode, op1);
2427
2428 pat = GEN_FCN ((int) code) (object, op1, opalign);
2429 if (pat)
2430 {
2431 emit_insn (pat);
2432 return 0;
2433 }
2434 else
2435 delete_insns_since (last);
2436 }
2437 }
2438
2439 /* OBJECT or SIZE may have been passed through protect_from_queue.
2440
2441 It is unsafe to save the value generated by protect_from_queue
2442 and reuse it later. Consider what happens if emit_queue is
2443 called before the return value from protect_from_queue is used.
2444
2445 Expansion of the CALL_EXPR below will call emit_queue before
2446 we are finished emitting RTL for argument setup. So if we are
2447 not careful we could get the wrong value for an argument.
2448
2449 To avoid this problem we go ahead and emit code to copy OBJECT
2450 and SIZE into new pseudos. We can then place those new pseudos
2451 into an RTL_EXPR and use them later, even after a call to
2452 emit_queue.
2453
2454 Note this is not strictly needed for library calls since they
2455 do not call emit_queue before loading their arguments. However,
2456 we may need to have library calls call emit_queue in the future
2457 since failing to do so could cause problems for targets which
2458 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2459 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2460
2461 #ifdef TARGET_MEM_FUNCTIONS
2462 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2463 #else
2464 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2465 TREE_UNSIGNED (integer_type_node));
2466 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2467 #endif
2468
2469
2470 #ifdef TARGET_MEM_FUNCTIONS
2471 /* It is incorrect to use the libcall calling conventions to call
2472 memset in this context.
2473
2474 This could be a user call to memset and the user may wish to
2475 examine the return value from memset.
2476
2477 For targets where libcalls and normal calls have different
2478 conventions for returning pointers, we could end up generating
2479 incorrect code.
2480
2481 So instead of using a libcall sequence we build up a suitable
2482 CALL_EXPR and expand the call in the normal fashion. */
2483 if (fn == NULL_TREE)
2484 {
2485 tree fntype;
2486
2487 /* This was copied from except.c, I don't know if all this is
2488 necessary in this context or not. */
2489 fn = get_identifier ("memset");
2490 push_obstacks_nochange ();
2491 end_temporary_allocation ();
2492 fntype = build_pointer_type (void_type_node);
2493 fntype = build_function_type (fntype, NULL_TREE);
2494 fn = build_decl (FUNCTION_DECL, fn, fntype);
2495 ggc_add_tree_root (&fn, 1);
2496 DECL_EXTERNAL (fn) = 1;
2497 TREE_PUBLIC (fn) = 1;
2498 DECL_ARTIFICIAL (fn) = 1;
2499 make_decl_rtl (fn, NULL_PTR, 1);
2500 assemble_external (fn);
2501 pop_obstacks ();
2502 }
2503
2504 /* We need to make an argument list for the function call.
2505
2506          memset has three arguments: the first is a void * address, the
2507          second an integer with the initialization value, and the last is a
2508 size_t byte count for the copy. */
2509 arg_list
2510 = build_tree_list (NULL_TREE,
2511 make_tree (build_pointer_type (void_type_node),
2512 object));
2513 TREE_CHAIN (arg_list)
2514 = build_tree_list (NULL_TREE,
2515 make_tree (integer_type_node, const0_rtx));
2516 TREE_CHAIN (TREE_CHAIN (arg_list))
2517 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2518 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2519
2520 /* Now we have to build up the CALL_EXPR itself. */
2521 call_expr = build1 (ADDR_EXPR,
2522 build_pointer_type (TREE_TYPE (fn)), fn);
2523 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2524 call_expr, arg_list, NULL_TREE);
2525 TREE_SIDE_EFFECTS (call_expr) = 1;
2526
2527 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2528 #else
2529 emit_library_call (bzero_libfunc, 0,
2530 VOIDmode, 2, object, Pmode, size,
2531 TYPE_MODE (integer_type_node));
2532 #endif
2533 }
2534 }
2535 else
2536 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2537
2538 return retval;
2539 }
2540
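/* Illustrative sketch (not part of GNU CC): zeroing a 64-byte BLKmode
   object.  The size and the 32-bit alignment are assumptions chosen only
   for this example.  */
#if 0
static void
example_clear (blk)
     rtx blk;			/* A BLKmode MEM.  */
{
  /* SIZE is an rtx giving the length in bytes; ALIGN is in bits.  */
  clear_storage (blk, GEN_INT (64), 32);
}
#endif
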
2541 /* Generate code to copy Y into X.
2542 Both Y and X must have the same mode, except that
2543 Y can be a constant with VOIDmode.
2544 This mode cannot be BLKmode; use emit_block_move for that.
2545
2546 Return the last instruction emitted. */
2547
2548 rtx
2549 emit_move_insn (x, y)
2550 rtx x, y;
2551 {
2552 enum machine_mode mode = GET_MODE (x);
2553
2554 x = protect_from_queue (x, 1);
2555 y = protect_from_queue (y, 0);
2556
2557 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2558 abort ();
2559
2560 /* Never force constant_p_rtx to memory. */
2561 if (GET_CODE (y) == CONSTANT_P_RTX)
2562 ;
2563 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2564 y = force_const_mem (mode, y);
2565
2566 /* If X or Y are memory references, verify that their addresses are valid
2567 for the machine. */
2568 if (GET_CODE (x) == MEM
2569 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2570 && ! push_operand (x, GET_MODE (x)))
2571 || (flag_force_addr
2572 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2573 x = change_address (x, VOIDmode, XEXP (x, 0));
2574
2575 if (GET_CODE (y) == MEM
2576 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2577 || (flag_force_addr
2578 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2579 y = change_address (y, VOIDmode, XEXP (y, 0));
2580
2581 if (mode == BLKmode)
2582 abort ();
2583
2584 return emit_move_insn_1 (x, y);
2585 }
2586
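/* Illustrative sketch (not part of GNU CC): the common pattern of moving
   a constant into a fresh pseudo register.  SImode is an assumption
   chosen only for this example.  */
#if 0
static rtx
example_move ()
{
  rtx reg = gen_reg_rtx (SImode);

  /* The CONST_INT has VOIDmode, so the destination supplies the mode.  */
  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif
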
2587 /* Low level part of emit_move_insn.
2588 Called just like emit_move_insn, but assumes X and Y
2589 are basically valid. */
2590
2591 rtx
2592 emit_move_insn_1 (x, y)
2593 rtx x, y;
2594 {
2595 enum machine_mode mode = GET_MODE (x);
2596 enum machine_mode submode;
2597 enum mode_class class = GET_MODE_CLASS (mode);
2598 unsigned int i;
2599
2600 if (mode >= MAX_MACHINE_MODE)
2601 abort ();
2602
2603 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2604 return
2605 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2606
2607 /* Expand complex moves by moving real part and imag part, if possible. */
2608 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2609 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2610 * BITS_PER_UNIT),
2611 (class == MODE_COMPLEX_INT
2612 ? MODE_INT : MODE_FLOAT),
2613 0))
2614 && (mov_optab->handlers[(int) submode].insn_code
2615 != CODE_FOR_nothing))
2616 {
2617 /* Don't split destination if it is a stack push. */
2618 int stack = push_operand (x, GET_MODE (x));
2619
2620       /* If this is a stack push, push the highpart first, so it
2621 will be in the argument order.
2622
2623 In that case, change_address is used only to convert
2624 the mode, not to change the address. */
2625 if (stack)
2626 {
2627 /* Note that the real part always precedes the imag part in memory
2628 regardless of machine's endianness. */
2629 #ifdef STACK_GROWS_DOWNWARD
2630 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2631 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2632 gen_imagpart (submode, y)));
2633 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2634 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2635 gen_realpart (submode, y)));
2636 #else
2637 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2638 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2639 gen_realpart (submode, y)));
2640 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2641 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2642 gen_imagpart (submode, y)));
2643 #endif
2644 }
2645 else
2646 {
2647 rtx realpart_x, realpart_y;
2648 rtx imagpart_x, imagpart_y;
2649
2650 /* If this is a complex value with each part being smaller than a
2651 word, the usual calling sequence will likely pack the pieces into
2652 a single register. Unfortunately, SUBREG of hard registers only
2653 deals in terms of words, so we have a problem converting input
2654 arguments to the CONCAT of two registers that is used elsewhere
2655 for complex values. If this is before reload, we can copy it into
2656 memory and reload. FIXME, we should see about using extract and
2657 insert on integer registers, but complex short and complex char
2658 variables should be rarely used. */
2659 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2660 && (reload_in_progress | reload_completed) == 0)
2661 {
2662 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2663 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2664
2665 if (packed_dest_p || packed_src_p)
2666 {
2667 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2668 ? MODE_FLOAT : MODE_INT);
2669
2670 enum machine_mode reg_mode =
2671 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2672
2673 if (reg_mode != BLKmode)
2674 {
2675 rtx mem = assign_stack_temp (reg_mode,
2676 GET_MODE_SIZE (mode), 0);
2677
2678 rtx cmem = change_address (mem, mode, NULL_RTX);
2679
2680 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2681
2682 if (packed_dest_p)
2683 {
2684 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2685 emit_move_insn_1 (cmem, y);
2686 return emit_move_insn_1 (sreg, mem);
2687 }
2688 else
2689 {
2690 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2691 emit_move_insn_1 (mem, sreg);
2692 return emit_move_insn_1 (x, cmem);
2693 }
2694 }
2695 }
2696 }
2697
2698 realpart_x = gen_realpart (submode, x);
2699 realpart_y = gen_realpart (submode, y);
2700 imagpart_x = gen_imagpart (submode, x);
2701 imagpart_y = gen_imagpart (submode, y);
2702
2703 /* Show the output dies here. This is necessary for SUBREGs
2704 of pseudos since we cannot track their lifetimes correctly;
2705 hard regs shouldn't appear here except as return values.
2706 We never want to emit such a clobber after reload. */
2707 if (x != y
2708 && ! (reload_in_progress || reload_completed)
2709 && (GET_CODE (realpart_x) == SUBREG
2710 || GET_CODE (imagpart_x) == SUBREG))
2711 {
2712 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2713 }
2714
2715 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2716 (realpart_x, realpart_y));
2717 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2718 (imagpart_x, imagpart_y));
2719 }
2720
2721 return get_last_insn ();
2722 }
2723
2724 /* This will handle any multi-word mode that lacks a move_insn pattern.
2725 However, you will get better code if you define such patterns,
2726 even if they must turn into multiple assembler instructions. */
2727 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2728 {
2729 rtx last_insn = 0;
2730 rtx seq, inner;
2731 int need_clobber;
2732
2733 #ifdef PUSH_ROUNDING
2734
2735 /* If X is a push on the stack, do the push now and replace
2736 X with a reference to the stack pointer. */
2737 if (push_operand (x, GET_MODE (x)))
2738 {
2739 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2740 x = change_address (x, VOIDmode, stack_pointer_rtx);
2741 }
2742 #endif
2743
2744 /* If we are in reload, see if either operand is a MEM whose address
2745 is scheduled for replacement. */
2746 if (reload_in_progress && GET_CODE (x) == MEM
2747 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2748 {
2749 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2750
2751 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
2752 MEM_COPY_ATTRIBUTES (new, x);
2753 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
2754 x = new;
2755 }
2756 if (reload_in_progress && GET_CODE (y) == MEM
2757 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2758 {
2759 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2760
2761 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (y);
2762 MEM_COPY_ATTRIBUTES (new, y);
2763 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (y);
2764 y = new;
2765 }
2766
2767 start_sequence ();
2768
2769 need_clobber = 0;
2770 for (i = 0;
2771 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2772 i++)
2773 {
2774 rtx xpart = operand_subword (x, i, 1, mode);
2775 rtx ypart = operand_subword (y, i, 1, mode);
2776
2777 /* If we can't get a part of Y, put Y into memory if it is a
2778 constant. Otherwise, force it into a register. If we still
2779 can't get a part of Y, abort. */
2780 if (ypart == 0 && CONSTANT_P (y))
2781 {
2782 y = force_const_mem (mode, y);
2783 ypart = operand_subword (y, i, 1, mode);
2784 }
2785 else if (ypart == 0)
2786 ypart = operand_subword_force (y, i, mode);
2787
2788 if (xpart == 0 || ypart == 0)
2789 abort ();
2790
2791 need_clobber |= (GET_CODE (xpart) == SUBREG);
2792
2793 last_insn = emit_move_insn (xpart, ypart);
2794 }
2795
2796 seq = gen_sequence ();
2797 end_sequence ();
2798
2799 /* Show the output dies here. This is necessary for SUBREGs
2800 of pseudos since we cannot track their lifetimes correctly;
2801 hard regs shouldn't appear here except as return values.
2802 We never want to emit such a clobber after reload. */
2803 if (x != y
2804 && ! (reload_in_progress || reload_completed)
2805 && need_clobber != 0)
2806 {
2807 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2808 }
2809
2810 emit_insn (seq);
2811
2812 return last_insn;
2813 }
2814 else
2815 abort ();
2816 }
2817 \f
2818 /* Pushing data onto the stack. */
2819
2820 /* Push a block of length SIZE (perhaps variable)
2821 and return an rtx to address the beginning of the block.
2822 Note that it is not possible for the value returned to be a QUEUED.
2823 The value may be virtual_outgoing_args_rtx.
2824
2825 EXTRA is the number of bytes of padding to push in addition to SIZE.
2826 BELOW nonzero means this padding comes at low addresses;
2827 otherwise, the padding comes at high addresses. */
2828
2829 rtx
2830 push_block (size, extra, below)
2831 rtx size;
2832 int extra, below;
2833 {
2834 register rtx temp;
2835
2836 size = convert_modes (Pmode, ptr_mode, size, 1);
2837 if (CONSTANT_P (size))
2838 anti_adjust_stack (plus_constant (size, extra));
2839 else if (GET_CODE (size) == REG && extra == 0)
2840 anti_adjust_stack (size);
2841 else
2842 {
2843 rtx temp = copy_to_mode_reg (Pmode, size);
2844 if (extra != 0)
2845 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2846 temp, 0, OPTAB_LIB_WIDEN);
2847 anti_adjust_stack (temp);
2848 }
2849
2850 #ifndef STACK_GROWS_DOWNWARD
2851 #ifdef ARGS_GROW_DOWNWARD
2852 if (!ACCUMULATE_OUTGOING_ARGS)
2853 #else
2854 if (0)
2855 #endif
2856 #else
2857 if (1)
2858 #endif
2859 {
2860
2861 /* Return the lowest stack address when STACK or ARGS grow downward and
2862      we are not accumulating outgoing arguments (the c4x port uses such
2863 conventions). */
2864 temp = virtual_outgoing_args_rtx;
2865 if (extra != 0 && below)
2866 temp = plus_constant (temp, extra);
2867 }
2868 else
2869 {
2870 if (GET_CODE (size) == CONST_INT)
2871 temp = plus_constant (virtual_outgoing_args_rtx,
2872 - INTVAL (size) - (below ? 0 : extra));
2873 else if (extra != 0 && !below)
2874 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2875 negate_rtx (Pmode, plus_constant (size, extra)));
2876 else
2877 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2878 negate_rtx (Pmode, size));
2879 }
2880
2881 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2882 }
2883
2884 rtx
2885 gen_push_operand ()
2886 {
2887 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2888 }
2889
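/* Illustrative sketch (not part of GNU CC): gen_push_operand is normally
   wrapped in a MEM to form the destination of a push, giving e.g.

	(set (mem:SI (pre_dec:SI (reg sp))) ...)

   on a STACK_GROWS_DOWNWARD target.  SImode is an assumption chosen only
   for this example.  */
#if 0
static void
example_push (x)
     rtx x;			/* An SImode value to push.  */
{
  emit_move_insn (gen_rtx_MEM (SImode, gen_push_operand ()), x);
}
#endif
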
2890 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2891 block of SIZE bytes. */
2892
2893 static rtx
2894 get_push_address (size)
2895 int size;
2896 {
2897 register rtx temp;
2898
2899 if (STACK_PUSH_CODE == POST_DEC)
2900 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2901 else if (STACK_PUSH_CODE == POST_INC)
2902 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2903 else
2904 temp = stack_pointer_rtx;
2905
2906 return copy_to_reg (temp);
2907 }
2908
2909 /* Generate code to push X onto the stack, assuming it has mode MODE and
2910 type TYPE.
2911 MODE is redundant except when X is a CONST_INT (since they don't
2912 carry mode info).
2913 SIZE is an rtx for the size of data to be copied (in bytes),
2914 needed only if X is BLKmode.
2915
2916 ALIGN is maximum alignment we can assume.
2917
2918 If PARTIAL and REG are both nonzero, then copy that many of the first
2919 words of X into registers starting with REG, and push the rest of X.
2920 The amount of space pushed is decreased by PARTIAL words,
2921 rounded *down* to a multiple of PARM_BOUNDARY.
2922 REG must be a hard register in this case.
2923    If REG is zero but PARTIAL is not, take all other actions for an
2924 argument partially in registers, but do not actually load any
2925 registers.
2926
2927 EXTRA is the amount in bytes of extra space to leave next to this arg.
2928 This is ignored if an argument block has already been allocated.
2929
2930 On a machine that lacks real push insns, ARGS_ADDR is the address of
2931 the bottom of the argument block for this call. We use indexing off there
2932    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2933 argument block has not been preallocated.
2934
2935 ARGS_SO_FAR is the size of args previously pushed for this call.
2936
2937 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2938 for arguments passed in registers. If nonzero, it will be the number
2939 of bytes required. */
2940
2941 void
2942 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2943 args_addr, args_so_far, reg_parm_stack_space,
2944 alignment_pad)
2945 register rtx x;
2946 enum machine_mode mode;
2947 tree type;
2948 rtx size;
2949 unsigned int align;
2950 int partial;
2951 rtx reg;
2952 int extra;
2953 rtx args_addr;
2954 rtx args_so_far;
2955 int reg_parm_stack_space;
2956 rtx alignment_pad;
2957 {
2958 rtx xinner;
2959 enum direction stack_direction
2960 #ifdef STACK_GROWS_DOWNWARD
2961 = downward;
2962 #else
2963 = upward;
2964 #endif
2965
2966 /* Decide where to pad the argument: `downward' for below,
2967 `upward' for above, or `none' for don't pad it.
2968 Default is below for small data on big-endian machines; else above. */
2969 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2970
2971 /* Invert direction if stack is post-update. */
2972 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2973 if (where_pad != none)
2974 where_pad = (where_pad == downward ? upward : downward);
2975
2976 xinner = x = protect_from_queue (x, 0);
2977
2978 if (mode == BLKmode)
2979 {
2980 /* Copy a block into the stack, entirely or partially. */
2981
2982 register rtx temp;
2983 int used = partial * UNITS_PER_WORD;
2984 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2985 int skip;
2986
2987 if (size == 0)
2988 abort ();
2989
2990 used -= offset;
2991
2992 /* USED is now the # of bytes we need not copy to the stack
2993 because registers will take care of them. */
2994
2995 if (partial != 0)
2996 xinner = change_address (xinner, BLKmode,
2997 plus_constant (XEXP (xinner, 0), used));
2998
2999 /* If the partial register-part of the arg counts in its stack size,
3000 skip the part of stack space corresponding to the registers.
3001 Otherwise, start copying to the beginning of the stack space,
3002 by setting SKIP to 0. */
3003 skip = (reg_parm_stack_space == 0) ? 0 : used;
3004
3005 #ifdef PUSH_ROUNDING
3006 /* Do it with several push insns if that doesn't take lots of insns
3007 and if there is no difficulty with push insns that skip bytes
3008 on the stack for alignment purposes. */
3009 if (args_addr == 0
3010 && PUSH_ARGS
3011 && GET_CODE (size) == CONST_INT
3012 && skip == 0
3013 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3014 /* Here we avoid the case of a structure whose weak alignment
3015 forces many pushes of a small amount of data,
3016 and such small pushes do rounding that causes trouble. */
3017 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3018 || align >= BIGGEST_ALIGNMENT
3019 || PUSH_ROUNDING (align) == align)
3020 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3021 {
3022 /* Push padding now if padding above and stack grows down,
3023 or if padding below and stack grows up.
3024 But if space already allocated, this has already been done. */
3025 if (extra && args_addr == 0
3026 && where_pad != none && where_pad != stack_direction)
3027 anti_adjust_stack (GEN_INT (extra));
3028
3029 stack_pointer_delta += INTVAL (size) - used;
3030 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3031 INTVAL (size) - used, align);
3032
3033 if (current_function_check_memory_usage && ! in_check_memory_usage)
3034 {
3035 rtx temp;
3036
3037 in_check_memory_usage = 1;
3038 temp = get_push_address (INTVAL(size) - used);
3039 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3040 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3041 temp, Pmode,
3042 XEXP (xinner, 0), Pmode,
3043 GEN_INT (INTVAL(size) - used),
3044 TYPE_MODE (sizetype));
3045 else
3046 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3047 temp, Pmode,
3048 GEN_INT (INTVAL(size) - used),
3049 TYPE_MODE (sizetype),
3050 GEN_INT (MEMORY_USE_RW),
3051 TYPE_MODE (integer_type_node));
3052 in_check_memory_usage = 0;
3053 }
3054 }
3055 else
3056 #endif /* PUSH_ROUNDING */
3057 {
3058 /* Otherwise make space on the stack and copy the data
3059 to the address of that space. */
3060
3061 /* Deduct words put into registers from the size we must copy. */
3062 if (partial != 0)
3063 {
3064 if (GET_CODE (size) == CONST_INT)
3065 size = GEN_INT (INTVAL (size) - used);
3066 else
3067 size = expand_binop (GET_MODE (size), sub_optab, size,
3068 GEN_INT (used), NULL_RTX, 0,
3069 OPTAB_LIB_WIDEN);
3070 }
3071
3072 /* Get the address of the stack space.
3073 In this case, we do not deal with EXTRA separately.
3074 A single stack adjust will do. */
3075 if (! args_addr)
3076 {
3077 temp = push_block (size, extra, where_pad == downward);
3078 extra = 0;
3079 }
3080 else if (GET_CODE (args_so_far) == CONST_INT)
3081 temp = memory_address (BLKmode,
3082 plus_constant (args_addr,
3083 skip + INTVAL (args_so_far)));
3084 else
3085 temp = memory_address (BLKmode,
3086 plus_constant (gen_rtx_PLUS (Pmode,
3087 args_addr,
3088 args_so_far),
3089 skip));
3090 if (current_function_check_memory_usage && ! in_check_memory_usage)
3091 {
3092 rtx target;
3093
3094 in_check_memory_usage = 1;
3095 target = copy_to_reg (temp);
3096 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3097 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3098 target, Pmode,
3099 XEXP (xinner, 0), Pmode,
3100 size, TYPE_MODE (sizetype));
3101 else
3102 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3103 target, Pmode,
3104 size, TYPE_MODE (sizetype),
3105 GEN_INT (MEMORY_USE_RW),
3106 TYPE_MODE (integer_type_node));
3107 in_check_memory_usage = 0;
3108 }
3109
3110 /* TEMP is the address of the block. Copy the data there. */
3111 if (GET_CODE (size) == CONST_INT
3112 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3113 {
3114 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3115 INTVAL (size), align);
3116 goto ret;
3117 }
3118 else
3119 {
3120 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3121 enum machine_mode mode;
3122 rtx target = gen_rtx_MEM (BLKmode, temp);
3123
3124 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3125 mode != VOIDmode;
3126 mode = GET_MODE_WIDER_MODE (mode))
3127 {
3128 enum insn_code code = movstr_optab[(int) mode];
3129 insn_operand_predicate_fn pred;
3130
3131 if (code != CODE_FOR_nothing
3132 && ((GET_CODE (size) == CONST_INT
3133 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3134 <= (GET_MODE_MASK (mode) >> 1)))
3135 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3136 && (!(pred = insn_data[(int) code].operand[0].predicate)
3137 || ((*pred) (target, BLKmode)))
3138 && (!(pred = insn_data[(int) code].operand[1].predicate)
3139 || ((*pred) (xinner, BLKmode)))
3140 && (!(pred = insn_data[(int) code].operand[3].predicate)
3141 || ((*pred) (opalign, VOIDmode))))
3142 {
3143 rtx op2 = convert_to_mode (mode, size, 1);
3144 rtx last = get_last_insn ();
3145 rtx pat;
3146
3147 pred = insn_data[(int) code].operand[2].predicate;
3148 if (pred != 0 && ! (*pred) (op2, mode))
3149 op2 = copy_to_mode_reg (mode, op2);
3150
3151 pat = GEN_FCN ((int) code) (target, xinner,
3152 op2, opalign);
3153 if (pat)
3154 {
3155 emit_insn (pat);
3156 goto ret;
3157 }
3158 else
3159 delete_insns_since (last);
3160 }
3161 }
3162 }
3163
3164 if (!ACCUMULATE_OUTGOING_ARGS)
3165 {
3166 /* If the source is referenced relative to the stack pointer,
3167 copy it to another register to stabilize it. We do not need
3168 to do this if we know that we won't be changing sp. */
3169
3170 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3171 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3172 temp = copy_to_reg (temp);
3173 }
3174
3175 /* Make inhibit_defer_pop nonzero around the library call
3176 to force it to pop the bcopy-arguments right away. */
3177 NO_DEFER_POP;
3178 #ifdef TARGET_MEM_FUNCTIONS
3179 emit_library_call (memcpy_libfunc, 0,
3180 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3181 convert_to_mode (TYPE_MODE (sizetype),
3182 size, TREE_UNSIGNED (sizetype)),
3183 TYPE_MODE (sizetype));
3184 #else
3185 emit_library_call (bcopy_libfunc, 0,
3186 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3187 convert_to_mode (TYPE_MODE (integer_type_node),
3188 size,
3189 TREE_UNSIGNED (integer_type_node)),
3190 TYPE_MODE (integer_type_node));
3191 #endif
3192 OK_DEFER_POP;
3193 }
3194 }
3195 else if (partial > 0)
3196 {
3197 /* Scalar partly in registers. */
3198
3199 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3200 int i;
3201 int not_stack;
3202 /* # words of start of argument
3203 that we must make space for but need not store. */
3204 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3205 int args_offset = INTVAL (args_so_far);
3206 int skip;
3207
3208 /* Push padding now if padding above and stack grows down,
3209 or if padding below and stack grows up.
3210 But if space already allocated, this has already been done. */
3211 if (extra && args_addr == 0
3212 && where_pad != none && where_pad != stack_direction)
3213 anti_adjust_stack (GEN_INT (extra));
3214
3215 /* If we make space by pushing it, we might as well push
3216 the real data. Otherwise, we can leave OFFSET nonzero
3217 and leave the space uninitialized. */
3218 if (args_addr == 0)
3219 offset = 0;
3220
3221 /* Now NOT_STACK gets the number of words that we don't need to
3222 allocate on the stack. */
3223 not_stack = partial - offset;
3224
3225 /* If the partial register-part of the arg counts in its stack size,
3226 skip the part of stack space corresponding to the registers.
3227 Otherwise, start copying to the beginning of the stack space,
3228 by setting SKIP to 0. */
3229 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3230
3231 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3232 x = validize_mem (force_const_mem (mode, x));
3233
3234 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3235 SUBREGs of such registers are not allowed. */
3236 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3237 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3238 x = copy_to_reg (x);
3239
3240 /* Loop over all the words allocated on the stack for this arg. */
3241 /* We can do it by words, because any scalar bigger than a word
3242 has a size a multiple of a word. */
3243 #ifndef PUSH_ARGS_REVERSED
3244 for (i = not_stack; i < size; i++)
3245 #else
3246 for (i = size - 1; i >= not_stack; i--)
3247 #endif
3248 if (i >= not_stack + offset)
3249 emit_push_insn (operand_subword_force (x, i, mode),
3250 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3251 0, args_addr,
3252 GEN_INT (args_offset + ((i - not_stack + skip)
3253 * UNITS_PER_WORD)),
3254 reg_parm_stack_space, alignment_pad);
3255 }
3256 else
3257 {
3258 rtx addr;
3259 rtx target = NULL_RTX;
3260
3261 /* Push padding now if padding above and stack grows down,
3262 or if padding below and stack grows up.
3263 But if space already allocated, this has already been done. */
3264 if (extra && args_addr == 0
3265 && where_pad != none && where_pad != stack_direction)
3266 anti_adjust_stack (GEN_INT (extra));
3267
3268 #ifdef PUSH_ROUNDING
3269 if (args_addr == 0 && PUSH_ARGS)
3270 {
3271 addr = gen_push_operand ();
3272 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3273 }
3274 else
3275 #endif
3276 {
3277 if (GET_CODE (args_so_far) == CONST_INT)
3278 addr
3279 = memory_address (mode,
3280 plus_constant (args_addr,
3281 INTVAL (args_so_far)));
3282 else
3283 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3284 args_so_far));
3285 target = addr;
3286 }
3287
3288 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3289
3290 if (current_function_check_memory_usage && ! in_check_memory_usage)
3291 {
3292 in_check_memory_usage = 1;
3293 if (target == 0)
3294 target = get_push_address (GET_MODE_SIZE (mode));
3295
3296 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3297 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3298 target, Pmode,
3299 XEXP (x, 0), Pmode,
3300 GEN_INT (GET_MODE_SIZE (mode)),
3301 TYPE_MODE (sizetype));
3302 else
3303 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3304 target, Pmode,
3305 GEN_INT (GET_MODE_SIZE (mode)),
3306 TYPE_MODE (sizetype),
3307 GEN_INT (MEMORY_USE_RW),
3308 TYPE_MODE (integer_type_node));
3309 in_check_memory_usage = 0;
3310 }
3311 }
3312
3313 ret:
3314 /* If part should go in registers, copy that part
3315 into the appropriate registers. Do this now, at the end,
3316 since mem-to-mem copies above may do function calls. */
3317 if (partial > 0 && reg != 0)
3318 {
3319 /* Handle calls that pass values in multiple non-contiguous locations.
3320 The Irix 6 ABI has examples of this. */
3321 if (GET_CODE (reg) == PARALLEL)
3322 emit_group_load (reg, x, -1, align); /* ??? size? */
3323 else
3324 move_block_to_reg (REGNO (reg), x, partial, mode);
3325 }
3326
3327 if (extra && args_addr == 0 && where_pad == stack_direction)
3328 anti_adjust_stack (GEN_INT (extra));
3329
3330 if (alignment_pad)
3331 anti_adjust_stack (alignment_pad);
3332 }
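
/* Illustrative sketch (not part of GNU CC): pushing a single SImode
   immediate as an outgoing argument on a target with push instructions
   (ARGS_ADDR is zero, so the value is pushed rather than stored into a
   preallocated argument block).  All of the parameter values are
   assumptions chosen only for this example.  */
#if 0
static void
example_push_arg ()
{
  emit_push_insn (GEN_INT (42), SImode, NULL_TREE, NULL_RTX,
		  GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX, 0,
		  NULL_RTX, const0_rtx, 0, NULL_RTX);
}
#endif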
3333 \f
3334 /* Expand an assignment that stores the value of FROM into TO.
3335 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3336 (This may contain a QUEUED rtx;
3337 if the value is constant, this rtx is a constant.)
3338 Otherwise, the returned value is NULL_RTX.
3339
3340 SUGGEST_REG is no longer actually used.
3341 It used to mean, copy the value through a register
3342 and return that register, if that is possible.
3343 We now use WANT_VALUE to decide whether to do this. */
3344
3345 rtx
3346 expand_assignment (to, from, want_value, suggest_reg)
3347 tree to, from;
3348 int want_value;
3349 int suggest_reg ATTRIBUTE_UNUSED;
3350 {
3351 register rtx to_rtx = 0;
3352 rtx result;
3353
3354 /* Don't crash if the lhs of the assignment was erroneous. */
3355
3356 if (TREE_CODE (to) == ERROR_MARK)
3357 {
3358 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3359 return want_value ? result : NULL_RTX;
3360 }
3361
3362 /* Assignment of a structure component needs special treatment
3363 if the structure component's rtx is not simply a MEM.
3364 Assignment of an array element at a constant index, and assignment of
3365 an array element in an unaligned packed structure field, has the same
3366 problem. */
3367
3368 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3369 || TREE_CODE (to) == ARRAY_REF)
3370 {
3371 enum machine_mode mode1;
3372 HOST_WIDE_INT bitsize, bitpos;
3373 tree offset;
3374 int unsignedp;
3375 int volatilep = 0;
3376 tree tem;
3377 unsigned int alignment;
3378
3379 push_temp_slots ();
3380 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3381 &unsignedp, &volatilep, &alignment);
3382
3383 /* If we are going to use store_bit_field and extract_bit_field,
3384 make sure to_rtx will be safe for multiple use. */
3385
3386 if (mode1 == VOIDmode && want_value)
3387 tem = stabilize_reference (tem);
3388
3389 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3390 if (offset != 0)
3391 {
3392 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3393
3394 if (GET_CODE (to_rtx) != MEM)
3395 abort ();
3396
3397 if (GET_MODE (offset_rtx) != ptr_mode)
3398 {
3399 #ifdef POINTERS_EXTEND_UNSIGNED
3400 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3401 #else
3402 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3403 #endif
3404 }
3405
3406          /* A constant address in TO_RTX can have VOIDmode; we must not try
3407 to call force_reg for that case. Avoid that case. */
3408 if (GET_CODE (to_rtx) == MEM
3409 && GET_MODE (to_rtx) == BLKmode
3410 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3411 && bitsize
3412 && (bitpos % bitsize) == 0
3413 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3414 && alignment == GET_MODE_ALIGNMENT (mode1))
3415 {
3416 rtx temp = change_address (to_rtx, mode1,
3417 plus_constant (XEXP (to_rtx, 0),
3418 (bitpos /
3419 BITS_PER_UNIT)));
3420 if (GET_CODE (XEXP (temp, 0)) == REG)
3421 to_rtx = temp;
3422 else
3423 to_rtx = change_address (to_rtx, mode1,
3424 force_reg (GET_MODE (XEXP (temp, 0)),
3425 XEXP (temp, 0)));
3426 bitpos = 0;
3427 }
3428
3429 to_rtx = change_address (to_rtx, VOIDmode,
3430 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3431 force_reg (ptr_mode,
3432 offset_rtx)));
3433 }
3434
3435 if (volatilep)
3436 {
3437 if (GET_CODE (to_rtx) == MEM)
3438 {
3439 /* When the offset is zero, to_rtx is the address of the
3440 structure we are storing into, and hence may be shared.
3441 We must make a new MEM before setting the volatile bit. */
3442 if (offset == 0)
3443 to_rtx = copy_rtx (to_rtx);
3444
3445 MEM_VOLATILE_P (to_rtx) = 1;
3446 }
3447 #if 0 /* This was turned off because, when a field is volatile
3448 in an object which is not volatile, the object may be in a register,
3449 and then we would abort over here. */
3450 else
3451 abort ();
3452 #endif
3453 }
3454
3455 if (TREE_CODE (to) == COMPONENT_REF
3456 && TREE_READONLY (TREE_OPERAND (to, 1)))
3457 {
3458 if (offset == 0)
3459 to_rtx = copy_rtx (to_rtx);
3460
3461 RTX_UNCHANGING_P (to_rtx) = 1;
3462 }
3463
3464 /* Check the access. */
3465 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3466 {
3467 rtx to_addr;
3468 int size;
3469 int best_mode_size;
3470 enum machine_mode best_mode;
3471
3472 best_mode = get_best_mode (bitsize, bitpos,
3473 TYPE_ALIGN (TREE_TYPE (tem)),
3474 mode1, volatilep);
3475 if (best_mode == VOIDmode)
3476 best_mode = QImode;
3477
3478 best_mode_size = GET_MODE_BITSIZE (best_mode);
3479 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3480 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3481 size *= GET_MODE_SIZE (best_mode);
3482
3483 /* Check the access right of the pointer. */
3484 if (size)
3485 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3486 to_addr, Pmode,
3487 GEN_INT (size), TYPE_MODE (sizetype),
3488 GEN_INT (MEMORY_USE_WO),
3489 TYPE_MODE (integer_type_node));
3490 }
3491
3492 /* If this is a varying-length object, we must get the address of
3493 the source and do an explicit block move. */
3494 if (bitsize < 0)
3495 {
3496 unsigned int from_align;
3497 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3498 rtx inner_to_rtx
3499 = change_address (to_rtx, VOIDmode,
3500 plus_constant (XEXP (to_rtx, 0),
3501 bitpos / BITS_PER_UNIT));
3502
3503 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3504 MIN (alignment, from_align));
3505 free_temp_slots ();
3506 pop_temp_slots ();
3507 return to_rtx;
3508 }
3509 else
3510 {
3511 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3512 (want_value
3513 /* Spurious cast for HPUX compiler. */
3514 ? ((enum machine_mode)
3515 TYPE_MODE (TREE_TYPE (to)))
3516 : VOIDmode),
3517 unsignedp,
3518 alignment,
3519 int_size_in_bytes (TREE_TYPE (tem)),
3520 get_alias_set (to));
3521
3522 preserve_temp_slots (result);
3523 free_temp_slots ();
3524 pop_temp_slots ();
3525
3526 /* If the value is meaningful, convert RESULT to the proper mode.
3527 Otherwise, return nothing. */
3528 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3529 TYPE_MODE (TREE_TYPE (from)),
3530 result,
3531 TREE_UNSIGNED (TREE_TYPE (to)))
3532 : NULL_RTX);
3533 }
3534 }
3535
3536 /* If the rhs is a function call and its value is not an aggregate,
3537 call the function before we start to compute the lhs.
3538 This is needed for correct code for cases such as
3539 val = setjmp (buf) on machines where reference to val
3540 requires loading up part of an address in a separate insn.
3541
3542 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3543 since it might be a promoted variable where the zero- or sign- extension
3544 needs to be done. Handling this in the normal way is safe because no
3545 computation is done before the call. */
3546 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3547 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3548 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3549 && GET_CODE (DECL_RTL (to)) == REG))
3550 {
3551 rtx value;
3552
3553 push_temp_slots ();
3554 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3555 if (to_rtx == 0)
3556 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3557
3558 /* Handle calls that return values in multiple non-contiguous locations.
3559 The Irix 6 ABI has examples of this. */
3560 if (GET_CODE (to_rtx) == PARALLEL)
3561 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3562 TYPE_ALIGN (TREE_TYPE (from)));
3563 else if (GET_MODE (to_rtx) == BLKmode)
3564 emit_block_move (to_rtx, value, expr_size (from),
3565 TYPE_ALIGN (TREE_TYPE (from)));
3566 else
3567 {
3568 #ifdef POINTERS_EXTEND_UNSIGNED
3569 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3570 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3571 value = convert_memory_address (GET_MODE (to_rtx), value);
3572 #endif
3573 emit_move_insn (to_rtx, value);
3574 }
3575 preserve_temp_slots (to_rtx);
3576 free_temp_slots ();
3577 pop_temp_slots ();
3578 return want_value ? to_rtx : NULL_RTX;
3579 }
3580
3581 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3582 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3583
3584 if (to_rtx == 0)
3585 {
3586 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3587 if (GET_CODE (to_rtx) == MEM)
3588 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3589 }
3590
3591 /* Don't move directly into a return register. */
3592 if (TREE_CODE (to) == RESULT_DECL
3593 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3594 {
3595 rtx temp;
3596
3597 push_temp_slots ();
3598 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3599
3600 if (GET_CODE (to_rtx) == PARALLEL)
3601 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3602 TYPE_ALIGN (TREE_TYPE (from)));
3603 else
3604 emit_move_insn (to_rtx, temp);
3605
3606 preserve_temp_slots (to_rtx);
3607 free_temp_slots ();
3608 pop_temp_slots ();
3609 return want_value ? to_rtx : NULL_RTX;
3610 }
3611
3612 /* In case we are returning the contents of an object which overlaps
3613 the place the value is being stored, use a safe function when copying
3614 a value through a pointer into a structure value return block. */
3615 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3616 && current_function_returns_struct
3617 && !current_function_returns_pcc_struct)
3618 {
3619 rtx from_rtx, size;
3620
3621 push_temp_slots ();
3622 size = expr_size (from);
3623 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3624 EXPAND_MEMORY_USE_DONT);
3625
3626 /* Copy the rights of the bitmap. */
3627 if (current_function_check_memory_usage)
3628 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3629 XEXP (to_rtx, 0), Pmode,
3630 XEXP (from_rtx, 0), Pmode,
3631 convert_to_mode (TYPE_MODE (sizetype),
3632 size, TREE_UNSIGNED (sizetype)),
3633 TYPE_MODE (sizetype));
3634
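      /* Copy the data itself, using memcpy when the target provides the
	 standard memory functions and bcopy otherwise.  */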
3635 #ifdef TARGET_MEM_FUNCTIONS
3636 emit_library_call (memcpy_libfunc, 0,
3637 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3638 XEXP (from_rtx, 0), Pmode,
3639 convert_to_mode (TYPE_MODE (sizetype),
3640 size, TREE_UNSIGNED (sizetype)),
3641 TYPE_MODE (sizetype));
3642 #else
3643 emit_library_call (bcopy_libfunc, 0,
3644 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3645 XEXP (to_rtx, 0), Pmode,
3646 convert_to_mode (TYPE_MODE (integer_type_node),
3647 size, TREE_UNSIGNED (integer_type_node)),
3648 TYPE_MODE (integer_type_node));
3649 #endif
3650
3651 preserve_temp_slots (to_rtx);
3652 free_temp_slots ();
3653 pop_temp_slots ();
3654 return want_value ? to_rtx : NULL_RTX;
3655 }
3656
3657 /* Compute FROM and store the value in the rtx we got. */
3658
3659 push_temp_slots ();
3660 result = store_expr (from, to_rtx, want_value);
3661 preserve_temp_slots (result);
3662 free_temp_slots ();
3663 pop_temp_slots ();
3664 return want_value ? result : NULL_RTX;
3665 }
3666
3667 /* Generate code for computing expression EXP,
3668 and storing the value into TARGET.
3669 TARGET may contain a QUEUED rtx.
3670
3671 If WANT_VALUE is nonzero, return a copy of the value
3672 not in TARGET, so that we can be sure to use the proper
3673 value in a containing expression even if TARGET has something
3674 else stored in it. If possible, we copy the value through a pseudo
3675 and return that pseudo. Or, if the value is constant, we try to
3676 return the constant. In some cases, we return a pseudo
3677 copied *from* TARGET.
3678
3679 If the mode is BLKmode then we may return TARGET itself.
3680    It turns out that in BLKmode it doesn't cause a problem,
3681    because C has no operators that could combine two different
3682 assignments into the same BLKmode object with different values
3683 with no sequence point. Will other languages need this to
3684 be more thorough?
3685
3686 If WANT_VALUE is 0, we return NULL, to make sure
3687 to catch quickly any cases where the caller uses the value
3688 and fails to set WANT_VALUE. */
3689
3690 rtx
3691 store_expr (exp, target, want_value)
3692 register tree exp;
3693 register rtx target;
3694 int want_value;
3695 {
3696 register rtx temp;
3697 int dont_return_target = 0;
3698
3699 if (TREE_CODE (exp) == COMPOUND_EXPR)
3700 {
3701 /* Perform first part of compound expression, then assign from second
3702 part. */
3703 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3704 emit_queue ();
3705 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3706 }
3707 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3708 {
3709 /* For conditional expression, get safe form of the target. Then
3710 test the condition, doing the appropriate assignment on either
3711 side. This avoids the creation of unnecessary temporaries.
3712 For non-BLKmode, it is more efficient not to do this. */
3713
3714 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3715
3716 emit_queue ();
3717 target = protect_from_queue (target, 1);
3718
3719 do_pending_stack_adjust ();
3720 NO_DEFER_POP;
3721 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3722 start_cleanup_deferral ();
3723 store_expr (TREE_OPERAND (exp, 1), target, 0);
3724 end_cleanup_deferral ();
3725 emit_queue ();
3726 emit_jump_insn (gen_jump (lab2));
3727 emit_barrier ();
3728 emit_label (lab1);
3729 start_cleanup_deferral ();
3730 store_expr (TREE_OPERAND (exp, 2), target, 0);
3731 end_cleanup_deferral ();
3732 emit_queue ();
3733 emit_label (lab2);
3734 OK_DEFER_POP;
3735
3736 return want_value ? target : NULL_RTX;
3737 }
3738 else if (queued_subexp_p (target))
3739 /* If target contains a postincrement, let's not risk
3740 using it as the place to generate the rhs. */
3741 {
3742 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3743 {
3744 /* Expand EXP into a new pseudo. */
3745 temp = gen_reg_rtx (GET_MODE (target));
3746 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3747 }
3748 else
3749 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3750
3751 /* If target is volatile, ANSI requires accessing the value
3752 *from* the target, if it is accessed. So make that happen.
3753 In no case return the target itself. */
3754 if (! MEM_VOLATILE_P (target) && want_value)
3755 dont_return_target = 1;
3756 }
3757 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3758 && GET_MODE (target) != BLKmode)
3759 /* If target is in memory and caller wants value in a register instead,
3760 arrange that. Pass TARGET as target for expand_expr so that,
3761 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3762 We know expand_expr will not use the target in that case.
3763 Don't do this if TARGET is volatile because we are supposed
3764 to write it and then read it. */
3765 {
3766 temp = expand_expr (exp, target, GET_MODE (target), 0);
3767 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3768 temp = copy_to_reg (temp);
3769 dont_return_target = 1;
3770 }
3771 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3772     /* If this is a scalar in a register that is stored in a wider mode
3773 than the declared mode, compute the result into its declared mode
3774 and then convert to the wider mode. Our value is the computed
3775 expression. */
3776 {
3777 /* If we don't want a value, we can do the conversion inside EXP,
3778 which will often result in some optimizations. Do the conversion
3779 in two steps: first change the signedness, if needed, then
3780 the extend. But don't do this if the type of EXP is a subtype
3781 of something else since then the conversion might involve
3782 more than just converting modes. */
3783 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3784 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3785 {
3786 if (TREE_UNSIGNED (TREE_TYPE (exp))
3787 != SUBREG_PROMOTED_UNSIGNED_P (target))
3788 exp
3789 = convert
3790 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3791 TREE_TYPE (exp)),
3792 exp);
3793
3794 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3795 SUBREG_PROMOTED_UNSIGNED_P (target)),
3796 exp);
3797 }
3798
3799 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3800
3801 /* If TEMP is a volatile MEM and we want a result value, make
3802 the access now so it gets done only once. Likewise if
3803 it contains TARGET. */
3804 if (GET_CODE (temp) == MEM && want_value
3805 && (MEM_VOLATILE_P (temp)
3806 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3807 temp = copy_to_reg (temp);
3808
3809 /* If TEMP is a VOIDmode constant, use convert_modes to make
3810 sure that we properly convert it. */
3811 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3812 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3813 TYPE_MODE (TREE_TYPE (exp)), temp,
3814 SUBREG_PROMOTED_UNSIGNED_P (target));
3815
3816 convert_move (SUBREG_REG (target), temp,
3817 SUBREG_PROMOTED_UNSIGNED_P (target));
3818
3819 /* If we promoted a constant, change the mode back down to match
3820 target. Otherwise, the caller might get confused by a result whose
3821 mode is larger than expected. */
3822
3823 if (want_value && GET_MODE (temp) != GET_MODE (target)
3824 && GET_MODE (temp) != VOIDmode)
3825 {
3826 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3827 SUBREG_PROMOTED_VAR_P (temp) = 1;
3828 SUBREG_PROMOTED_UNSIGNED_P (temp)
3829 = SUBREG_PROMOTED_UNSIGNED_P (target);
3830 }
3831
3832 return want_value ? temp : NULL_RTX;
3833 }
3834 else
3835 {
3836 temp = expand_expr (exp, target, GET_MODE (target), 0);
3837 /* Return TARGET if it's a specified hardware register.
3838 If TARGET is a volatile mem ref, either return TARGET
3839 or return a reg copied *from* TARGET; ANSI requires this.
3840
3841 Otherwise, if TEMP is not TARGET, return TEMP
3842 if it is constant (for efficiency),
3843 or if we really want the correct value. */
3844 if (!(target && GET_CODE (target) == REG
3845 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3846 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3847 && ! rtx_equal_p (temp, target)
3848 && (CONSTANT_P (temp) || want_value))
3849 dont_return_target = 1;
3850 }
3851
3852 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3853 the same as that of TARGET, adjust the constant. This is needed, for
3854 example, in case it is a CONST_DOUBLE and we want only a word-sized
3855 value. */
3856 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3857 && TREE_CODE (exp) != ERROR_MARK
3858 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3859 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3860 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3861
3862 if (current_function_check_memory_usage
3863 && GET_CODE (target) == MEM
3864 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3865 {
3866 if (GET_CODE (temp) == MEM)
3867 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3868 XEXP (target, 0), Pmode,
3869 XEXP (temp, 0), Pmode,
3870 expr_size (exp), TYPE_MODE (sizetype));
3871 else
3872 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3873 XEXP (target, 0), Pmode,
3874 expr_size (exp), TYPE_MODE (sizetype),
3875 GEN_INT (MEMORY_USE_WO),
3876 TYPE_MODE (integer_type_node));
3877 }
3878
3879 /* If value was not generated in the target, store it there.
3880      Convert the value to TARGET's type first if necessary.  */
3881 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3882 one or both of them are volatile memory refs, we have to distinguish
3883 two cases:
3884 - expand_expr has used TARGET. In this case, we must not generate
3885 another copy. This can be detected by TARGET being equal according
3886 to == .
3887 - expand_expr has not used TARGET - that means that the source just
3888 happens to have the same RTX form. Since temp will have been created
3889 by expand_expr, it will compare unequal according to == .
3890 We must generate a copy in this case, to reach the correct number
3891 of volatile memory references. */
3892
3893 if ((! rtx_equal_p (temp, target)
3894 || (temp != target && (side_effects_p (temp)
3895 || side_effects_p (target))))
3896 && TREE_CODE (exp) != ERROR_MARK)
3897 {
3898 target = protect_from_queue (target, 1);
3899 if (GET_MODE (temp) != GET_MODE (target)
3900 && GET_MODE (temp) != VOIDmode)
3901 {
3902 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3903 if (dont_return_target)
3904 {
3905 /* In this case, we will return TEMP,
3906 so make sure it has the proper mode.
3907 But don't forget to store the value into TARGET. */
3908 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3909 emit_move_insn (target, temp);
3910 }
3911 else
3912 convert_move (target, temp, unsignedp);
3913 }
3914
3915 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3916 {
3917 /* Handle copying a string constant into an array.
3918 The string constant may be shorter than the array.
3919 So copy just the string's actual length, and clear the rest. */
3920 rtx size;
3921 rtx addr;
3922
3923 /* Get the size of the data type of the string,
3924 which is actually the size of the target. */
3925 size = expr_size (exp);
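	  /* If the target is smaller than the string constant, one block
	     copy fills it entirely and there is nothing left to clear.  */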
3926 if (GET_CODE (size) == CONST_INT
3927 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3928 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3929 else
3930 {
3931 /* Compute the size of the data to copy from the string. */
3932 tree copy_size
3933 = size_binop (MIN_EXPR,
3934 make_tree (sizetype, size),
3935 size_int (TREE_STRING_LENGTH (exp)));
3936 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3937 VOIDmode, 0);
3938 rtx label = 0;
3939
3940 /* Copy that much. */
3941 emit_block_move (target, temp, copy_size_rtx,
3942 TYPE_ALIGN (TREE_TYPE (exp)));
3943
3944 /* Figure out how much is left in TARGET that we have to clear.
3945 Do all calculations in ptr_mode. */
3946
3947 addr = XEXP (target, 0);
3948 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3949
3950 if (GET_CODE (copy_size_rtx) == CONST_INT)
3951 {
3952 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3953 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3954 }
3955 else
3956 {
3957 addr = force_reg (ptr_mode, addr);
3958 addr = expand_binop (ptr_mode, add_optab, addr,
3959 copy_size_rtx, NULL_RTX, 0,
3960 OPTAB_LIB_WIDEN);
3961
3962 size = expand_binop (ptr_mode, sub_optab, size,
3963 copy_size_rtx, NULL_RTX, 0,
3964 OPTAB_LIB_WIDEN);
3965
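		  /* The number of bytes left to clear is only known at run
		     time; branch around the clearing code if it turns out
		     to be negative.  */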
3966 label = gen_label_rtx ();
3967 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3968 GET_MODE (size), 0, 0, label);
3969 }
3970
3971 if (size != const0_rtx)
3972 {
3973 /* Be sure we can write on ADDR. */
3974 if (current_function_check_memory_usage)
3975 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3976 addr, Pmode,
3977 size, TYPE_MODE (sizetype),
3978 GEN_INT (MEMORY_USE_WO),
3979 TYPE_MODE (integer_type_node));
3980 #ifdef TARGET_MEM_FUNCTIONS
3981 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3982 addr, ptr_mode,
3983 const0_rtx, TYPE_MODE (integer_type_node),
3984 convert_to_mode (TYPE_MODE (sizetype),
3985 size,
3986 TREE_UNSIGNED (sizetype)),
3987 TYPE_MODE (sizetype));
3988 #else
3989 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3990 addr, ptr_mode,
3991 convert_to_mode (TYPE_MODE (integer_type_node),
3992 size,
3993 TREE_UNSIGNED (integer_type_node)),
3994 TYPE_MODE (integer_type_node));
3995 #endif
3996 }
3997
3998 if (label)
3999 emit_label (label);
4000 }
4001 }
4002 /* Handle calls that return values in multiple non-contiguous locations.
4003 The Irix 6 ABI has examples of this. */
4004 else if (GET_CODE (target) == PARALLEL)
4005 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4006 TYPE_ALIGN (TREE_TYPE (exp)));
4007 else if (GET_MODE (temp) == BLKmode)
4008 emit_block_move (target, temp, expr_size (exp),
4009 TYPE_ALIGN (TREE_TYPE (exp)));
4010 else
4011 emit_move_insn (target, temp);
4012 }
4013
4014 /* If we don't want a value, return NULL_RTX. */
4015 if (! want_value)
4016 return NULL_RTX;
4017
4018 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4019 ??? The latter test doesn't seem to make sense. */
4020 else if (dont_return_target && GET_CODE (temp) != MEM)
4021 return temp;
4022
4023 /* Return TARGET itself if it is a hard register. */
4024 else if (want_value && GET_MODE (target) != BLKmode
4025 && ! (GET_CODE (target) == REG
4026 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4027 return copy_to_reg (target);
4028
4029 else
4030 return target;
4031 }
4032 \f
4033 /* Return 1 if EXP just contains zeros. */
4034
4035 static int
4036 is_zeros_p (exp)
4037 tree exp;
4038 {
4039 tree elt;
4040
4041 switch (TREE_CODE (exp))
4042 {
4043 case CONVERT_EXPR:
4044 case NOP_EXPR:
4045 case NON_LVALUE_EXPR:
4046 return is_zeros_p (TREE_OPERAND (exp, 0));
4047
4048 case INTEGER_CST:
4049 return integer_zerop (exp);
4050
4051 case COMPLEX_CST:
4052 return
4053 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4054
4055 case REAL_CST:
4056 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4057
4058 case CONSTRUCTOR:
4059 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4060 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4061 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4062 if (! is_zeros_p (TREE_VALUE (elt)))
4063 return 0;
4064
4065 return 1;
4066
4067 default:
4068 return 0;
4069 }
4070 }
4071
4072 /* Return 1 if EXP contains mostly (3/4) zeros. */
4073
4074 static int
4075 mostly_zeros_p (exp)
4076 tree exp;
4077 {
4078 if (TREE_CODE (exp) == CONSTRUCTOR)
4079 {
4080 int elts = 0, zeros = 0;
4081 tree elt = CONSTRUCTOR_ELTS (exp);
4082 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4083 {
4084 /* If there are no ranges of true bits, it is all zero. */
4085 return elt == NULL_TREE;
4086 }
4087 for (; elt; elt = TREE_CHAIN (elt))
4088 {
4089 /* We do not handle the case where the index is a RANGE_EXPR,
4090 so the statistic will be somewhat inaccurate.
4091 We do make a more accurate count in store_constructor itself,
4092 	     and since this function is only used for nested array elements,
4093 this should be close enough. */
4094 if (mostly_zeros_p (TREE_VALUE (elt)))
4095 zeros++;
4096 elts++;
4097 }
4098
4099 return 4 * zeros >= 3 * elts;
4100 }
4101
4102 return is_zeros_p (exp);
4103 }
4104 \f
4105 /* Helper function for store_constructor.
4106 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4107 TYPE is the type of the CONSTRUCTOR, not the element type.
4108 ALIGN and CLEARED are as for store_constructor.
4109
4110 This provides a recursive shortcut back to store_constructor when it isn't
4111 necessary to go through store_field. This is so that we can pass through
4112 the cleared field to let store_constructor know that we may not have to
4113 clear a substructure if the outer structure has already been cleared. */
4114
4115 static void
4116 store_constructor_field (target, bitsize, bitpos,
4117 mode, exp, type, align, cleared)
4118 rtx target;
4119 unsigned HOST_WIDE_INT bitsize;
4120 HOST_WIDE_INT bitpos;
4121 enum machine_mode mode;
4122 tree exp, type;
4123 unsigned int align;
4124 int cleared;
4125 {
4126 if (TREE_CODE (exp) == CONSTRUCTOR
4127 && bitpos % BITS_PER_UNIT == 0
4128 /* If we have a non-zero bitpos for a register target, then we just
4129 let store_field do the bitfield handling. This is unlikely to
4130 	 generate unnecessary clear instructions anyway.  */
4131 && (bitpos == 0 || GET_CODE (target) == MEM))
4132 {
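      /* Point TARGET at the byte containing BITPOS, falling back to a
	 BLKmode reference if that offset is not aligned for TARGET's
	 mode.  */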
4133 if (bitpos != 0)
4134 target
4135 = change_address (target,
4136 GET_MODE (target) == BLKmode
4137 || 0 != (bitpos
4138 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4139 ? BLKmode : VOIDmode,
4140 plus_constant (XEXP (target, 0),
4141 bitpos / BITS_PER_UNIT));
4142 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4143 }
4144 else
4145 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4146 int_size_in_bytes (type), 0);
4147 }
4148
4149 /* Store the value of constructor EXP into the rtx TARGET.
4150 TARGET is either a REG or a MEM.
4151 ALIGN is the maximum known alignment for TARGET.
4152    CLEARED is true if TARGET is known to have been zeroed.
4153 SIZE is the number of bytes of TARGET we are allowed to modify: this
4154 may not be the same as the size of EXP if we are assigning to a field
4155 which has been packed to exclude padding bits. */
4156
4157 static void
4158 store_constructor (exp, target, align, cleared, size)
4159 tree exp;
4160 rtx target;
4161 unsigned int align;
4162 int cleared;
4163 HOST_WIDE_INT size;
4164 {
4165 tree type = TREE_TYPE (exp);
4166 #ifdef WORD_REGISTER_OPERATIONS
4167 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4168 #endif
4169
4170 /* We know our target cannot conflict, since safe_from_p has been called. */
4171 #if 0
4172 /* Don't try copying piece by piece into a hard register
4173 since that is vulnerable to being clobbered by EXP.
4174 Instead, construct in a pseudo register and then copy it all. */
4175 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4176 {
4177 rtx temp = gen_reg_rtx (GET_MODE (target));
4178 store_constructor (exp, temp, align, cleared, size);
4179 emit_move_insn (target, temp);
4180 return;
4181 }
4182 #endif
4183
4184 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4185 || TREE_CODE (type) == QUAL_UNION_TYPE)
4186 {
4187 register tree elt;
4188
4189 /* Inform later passes that the whole union value is dead. */
4190 if ((TREE_CODE (type) == UNION_TYPE
4191 || TREE_CODE (type) == QUAL_UNION_TYPE)
4192 && ! cleared)
4193 {
4194 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4195
4196 /* If the constructor is empty, clear the union. */
4197 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4198 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4199 }
4200
4201 /* If we are building a static constructor into a register,
4202 set the initial value as zero so we can fold the value into
4203 a constant. But if more than one register is involved,
4204 this probably loses. */
4205 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4206 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4207 {
4208 if (! cleared)
4209 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4210
4211 cleared = 1;
4212 }
4213
4214 /* If the constructor has fewer fields than the structure
4215 or if we are initializing the structure to mostly zeros,
4216 clear the whole structure first. */
4217 else if (size > 0
4218 && ((list_length (CONSTRUCTOR_ELTS (exp))
4219 != fields_length (type))
4220 || mostly_zeros_p (exp)))
4221 {
4222 if (! cleared)
4223 clear_storage (target, GEN_INT (size), align);
4224
4225 cleared = 1;
4226 }
4227 else if (! cleared)
4228 /* Inform later passes that the old value is dead. */
4229 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4230
4231 /* Store each element of the constructor into
4232 the corresponding field of TARGET. */
4233
4234 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4235 {
4236 register tree field = TREE_PURPOSE (elt);
4237 #ifdef WORD_REGISTER_OPERATIONS
4238 tree value = TREE_VALUE (elt);
4239 #endif
4240 register enum machine_mode mode;
4241 HOST_WIDE_INT bitsize;
4242 HOST_WIDE_INT bitpos = 0;
4243 int unsignedp;
4244 tree offset;
4245 rtx to_rtx = target;
4246
4247 /* Just ignore missing fields.
4248 We cleared the whole structure, above,
4249 if any fields are missing. */
4250 if (field == 0)
4251 continue;
4252
4253 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4254 continue;
4255
4256 if (host_integerp (DECL_SIZE (field), 1))
4257 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4258 else
4259 bitsize = -1;
4260
4261 unsignedp = TREE_UNSIGNED (field);
4262 mode = DECL_MODE (field);
4263 if (DECL_BIT_FIELD (field))
4264 mode = VOIDmode;
4265
4266 offset = DECL_FIELD_OFFSET (field);
4267 if (host_integerp (offset, 0)
4268 && host_integerp (bit_position (field), 0))
4269 {
4270 bitpos = int_bit_position (field);
4271 offset = 0;
4272 }
4273 else
4274 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4275
4276 if (offset)
4277 {
4278 rtx offset_rtx;
4279
4280 if (contains_placeholder_p (offset))
4281 offset = build (WITH_RECORD_EXPR, sizetype,
4282 offset, make_tree (TREE_TYPE (exp), target));
4283
4284 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4285 if (GET_CODE (to_rtx) != MEM)
4286 abort ();
4287
4288 if (GET_MODE (offset_rtx) != ptr_mode)
4289 {
4290 #ifdef POINTERS_EXTEND_UNSIGNED
4291 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4292 #else
4293 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4294 #endif
4295 }
4296
4297 to_rtx
4298 = change_address (to_rtx, VOIDmode,
4299 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4300 force_reg (ptr_mode,
4301 offset_rtx)));
4302 align = DECL_OFFSET_ALIGN (field);
4303 }
4304
4305 if (TREE_READONLY (field))
4306 {
4307 if (GET_CODE (to_rtx) == MEM)
4308 to_rtx = copy_rtx (to_rtx);
4309
4310 RTX_UNCHANGING_P (to_rtx) = 1;
4311 }
4312
4313 #ifdef WORD_REGISTER_OPERATIONS
4314 /* If this initializes a field that is smaller than a word, at the
4315 start of a word, try to widen it to a full word.
4316 This special case allows us to output C++ member function
4317 initializations in a form that the optimizers can understand. */
4318 if (GET_CODE (target) == REG
4319 && bitsize < BITS_PER_WORD
4320 && bitpos % BITS_PER_WORD == 0
4321 && GET_MODE_CLASS (mode) == MODE_INT
4322 && TREE_CODE (value) == INTEGER_CST
4323 && exp_size >= 0
4324 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4325 {
4326 tree type = TREE_TYPE (value);
4327 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4328 {
4329 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4330 value = convert (type, value);
4331 }
4332 if (BYTES_BIG_ENDIAN)
4333 value
4334 = fold (build (LSHIFT_EXPR, type, value,
4335 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4336 bitsize = BITS_PER_WORD;
4337 mode = word_mode;
4338 }
4339 #endif
4340 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4341 TREE_VALUE (elt), type, align, cleared);
4342 }
4343 }
4344 else if (TREE_CODE (type) == ARRAY_TYPE)
4345 {
4346 register tree elt;
4347 register int i;
4348 int need_to_clear;
4349 tree domain = TYPE_DOMAIN (type);
4350 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4351 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4352 tree elttype = TREE_TYPE (type);
4353
4354 /* If the constructor has fewer elements than the array,
4355 clear the whole array first. Similarly if this is
4356 	 a static constructor of a non-BLKmode object.  */
4357 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4358 need_to_clear = 1;
4359 else
4360 {
4361 HOST_WIDE_INT count = 0, zero_count = 0;
4362 need_to_clear = 0;
4363 /* This loop is a more accurate version of the loop in
4364 mostly_zeros_p (it handles RANGE_EXPR in an index).
4365 It is also needed to check for missing elements. */
4366 for (elt = CONSTRUCTOR_ELTS (exp);
4367 elt != NULL_TREE;
4368 elt = TREE_CHAIN (elt))
4369 {
4370 tree index = TREE_PURPOSE (elt);
4371 HOST_WIDE_INT this_node_count;
4372
4373 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4374 {
4375 tree lo_index = TREE_OPERAND (index, 0);
4376 tree hi_index = TREE_OPERAND (index, 1);
4377
4378 if (! host_integerp (lo_index, 1)
4379 || ! host_integerp (hi_index, 1))
4380 {
4381 need_to_clear = 1;
4382 break;
4383 }
4384
4385 this_node_count = (tree_low_cst (hi_index, 1)
4386 - tree_low_cst (lo_index, 1) + 1);
4387 }
4388 else
4389 this_node_count = 1;
4390 count += this_node_count;
4391 if (mostly_zeros_p (TREE_VALUE (elt)))
4392 zero_count += this_node_count;
4393 }
4394 /* Clear the entire array first if there are any missing elements,
4395 or if the incidence of zero elements is >= 75%. */
4396 if (count < maxelt - minelt + 1
4397 || 4 * zero_count >= 3 * count)
4398 need_to_clear = 1;
4399 }
4400 if (need_to_clear && size > 0)
4401 {
4402 if (! cleared)
4403 clear_storage (target, GEN_INT (size), align);
4404 cleared = 1;
4405 }
4406 else
4407 /* Inform later passes that the old value is dead. */
4408 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4409
4410 /* Store each element of the constructor into
4411 the corresponding element of TARGET, determined
4412 by counting the elements. */
4413 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4414 elt;
4415 elt = TREE_CHAIN (elt), i++)
4416 {
4417 register enum machine_mode mode;
4418 HOST_WIDE_INT bitsize;
4419 HOST_WIDE_INT bitpos;
4420 int unsignedp;
4421 tree value = TREE_VALUE (elt);
4422 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4423 tree index = TREE_PURPOSE (elt);
4424 rtx xtarget = target;
4425
4426 if (cleared && is_zeros_p (value))
4427 continue;
4428
4429 unsignedp = TREE_UNSIGNED (elttype);
4430 mode = TYPE_MODE (elttype);
4431 if (mode == BLKmode)
4432 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4433 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4434 : -1);
4435 else
4436 bitsize = GET_MODE_BITSIZE (mode);
4437
4438 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4439 {
4440 tree lo_index = TREE_OPERAND (index, 0);
4441 tree hi_index = TREE_OPERAND (index, 1);
4442 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4443 struct nesting *loop;
4444 HOST_WIDE_INT lo, hi, count;
4445 tree position;
4446
4447 /* If the range is constant and "small", unroll the loop. */
4448 if (host_integerp (lo_index, 0)
4449 && host_integerp (hi_index, 0)
4450 && (lo = tree_low_cst (lo_index, 0),
4451 hi = tree_low_cst (hi_index, 0),
4452 count = hi - lo + 1,
4453 (GET_CODE (target) != MEM
4454 || count <= 2
4455 || (host_integerp (TYPE_SIZE (elttype), 1)
4456 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4457 <= 40 * 8)))))
4458 {
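		  /* Rebase the constant range to zero and store VALUE once
		     at each element's constant bit position.  */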
4459 lo -= minelt; hi -= minelt;
4460 for (; lo <= hi; lo++)
4461 {
4462 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4463 store_constructor_field (target, bitsize, bitpos, mode,
4464 value, type, align, cleared);
4465 }
4466 }
4467 else
4468 {
4469 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4470 loop_top = gen_label_rtx ();
4471 loop_end = gen_label_rtx ();
4472
4473 unsignedp = TREE_UNSIGNED (domain);
4474
4475 index = build_decl (VAR_DECL, NULL_TREE, domain);
4476
4477 DECL_RTL (index) = index_r
4478 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4479 &unsignedp, 0));
4480
4481 if (TREE_CODE (value) == SAVE_EXPR
4482 && SAVE_EXPR_RTL (value) == 0)
4483 {
4484 /* Make sure value gets expanded once before the
4485 loop. */
4486 expand_expr (value, const0_rtx, VOIDmode, 0);
4487 emit_queue ();
4488 }
4489 store_expr (lo_index, index_r, 0);
4490 loop = expand_start_loop (0);
4491
4492 /* Assign value to element index. */
4493 position
4494 = convert (ssizetype,
4495 fold (build (MINUS_EXPR, TREE_TYPE (index),
4496 index, TYPE_MIN_VALUE (domain))));
4497 position = size_binop (MULT_EXPR, position,
4498 convert (ssizetype,
4499 TYPE_SIZE_UNIT (elttype)));
4500
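		/* Address the element POSITION bytes past the start of
		   TARGET and store the value there.  */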
4501 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4502 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4503 xtarget = change_address (target, mode, addr);
4504 if (TREE_CODE (value) == CONSTRUCTOR)
4505 store_constructor (value, xtarget, align, cleared,
4506 bitsize / BITS_PER_UNIT);
4507 else
4508 store_expr (value, xtarget, 0);
4509
4510 expand_exit_loop_if_false (loop,
4511 build (LT_EXPR, integer_type_node,
4512 index, hi_index));
4513
4514 expand_increment (build (PREINCREMENT_EXPR,
4515 TREE_TYPE (index),
4516 index, integer_one_node), 0, 0);
4517 expand_end_loop ();
4518 emit_label (loop_end);
4519 }
4520 }
4521 else if ((index != 0 && ! host_integerp (index, 0))
4522 || ! host_integerp (TYPE_SIZE (elttype), 1))
4523 {
4524 rtx pos_rtx, addr;
4525 tree position;
4526
4527 if (index == 0)
4528 		index = ssize_int (i);
4529
4530 if (minelt)
4531 		index = convert (ssizetype,
4532 				 fold (build (MINUS_EXPR, TREE_TYPE (index),
4533 					      index, TYPE_MIN_VALUE (domain))));
4534
4535 position = size_binop (MULT_EXPR, index,
4536 convert (ssizetype,
4537 TYPE_SIZE_UNIT (elttype)));
4538 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4539 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4540 xtarget = change_address (target, mode, addr);
4541 store_expr (value, xtarget, 0);
4542 }
4543 else
4544 {
4545 if (index != 0)
4546 bitpos = ((tree_low_cst (index, 0) - minelt)
4547 * tree_low_cst (TYPE_SIZE (elttype), 1));
4548 else
4549 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4550
4551 store_constructor_field (target, bitsize, bitpos, mode, value,
4552 type, align, cleared);
4553 }
4554 }
4555 }
4556
4557   /* Set constructor assignments.  */
4558 else if (TREE_CODE (type) == SET_TYPE)
4559 {
4560 tree elt = CONSTRUCTOR_ELTS (exp);
4561 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4562 tree domain = TYPE_DOMAIN (type);
4563 tree domain_min, domain_max, bitlength;
4564
4565 /* The default implementation strategy is to extract the constant
4566 parts of the constructor, use that to initialize the target,
4567 and then "or" in whatever non-constant ranges we need in addition.
4568
4569 If a large set is all zero or all ones, it is
4570 probably better to set it using memset (if available) or bzero.
4571 Also, if a large set has just a single range, it may also be
4572        better to first clear the whole set (using
4573        bzero/memset), and then set the bits we want.  */
4574
4575 /* Check for all zeros. */
4576 if (elt == NULL_TREE && size > 0)
4577 {
4578 if (!cleared)
4579 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4580 return;
4581 }
4582
4583 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4584 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4585 bitlength = size_binop (PLUS_EXPR,
4586 size_diffop (domain_max, domain_min),
4587 ssize_int (1));
4588
4589 nbits = tree_low_cst (bitlength, 1);
4590
4591 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4592 are "complicated" (more than one range), initialize (the
4593 constant parts) by copying from a constant. */
4594 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4595 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4596 {
4597 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4598 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4599 char *bit_buffer = (char *) alloca (nbits);
4600 HOST_WIDE_INT word = 0;
4601 unsigned int bit_pos = 0;
4602 unsigned int ibit = 0;
4603 unsigned int offset = 0; /* In bytes from beginning of set. */
4604
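	  /* Extract the constant bits of the set into BIT_BUFFER, one byte
	     per bit, then pack them into words and store each word.  */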
4605 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4606 for (;;)
4607 {
4608 if (bit_buffer[ibit])
4609 {
4610 if (BYTES_BIG_ENDIAN)
4611 word |= (1 << (set_word_size - 1 - bit_pos));
4612 else
4613 word |= 1 << bit_pos;
4614 }
4615
4616 bit_pos++; ibit++;
4617 if (bit_pos >= set_word_size || ibit == nbits)
4618 {
4619 if (word != 0 || ! cleared)
4620 {
4621 rtx datum = GEN_INT (word);
4622 rtx to_rtx;
4623
4624 /* The assumption here is that it is safe to use
4625 XEXP if the set is multi-word, but not if
4626 it's single-word. */
4627 if (GET_CODE (target) == MEM)
4628 {
4629 to_rtx = plus_constant (XEXP (target, 0), offset);
4630 to_rtx = change_address (target, mode, to_rtx);
4631 }
4632 else if (offset == 0)
4633 to_rtx = target;
4634 else
4635 abort ();
4636 emit_move_insn (to_rtx, datum);
4637 }
4638
4639 if (ibit == nbits)
4640 break;
4641 word = 0;
4642 bit_pos = 0;
4643 offset += set_word_size / BITS_PER_UNIT;
4644 }
4645 }
4646 }
4647 else if (!cleared)
4648 /* Don't bother clearing storage if the set is all ones. */
4649 if (TREE_CHAIN (elt) != NULL_TREE
4650 || (TREE_PURPOSE (elt) == NULL_TREE
4651 ? nbits != 1
4652 : ( ! host_integerp (TREE_VALUE (elt), 0)
4653 || ! host_integerp (TREE_PURPOSE (elt), 0)
4654 || (tree_low_cst (TREE_VALUE (elt), 0)
4655 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4656 != (HOST_WIDE_INT) nbits))))
4657 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4658
4659 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4660 {
4661 	  /* Start of range of element, or NULL.  */
4662 tree startbit = TREE_PURPOSE (elt);
4663 	  /* End of range of element, or element value.  */
4664 tree endbit = TREE_VALUE (elt);
4665 #ifdef TARGET_MEM_FUNCTIONS
4666 HOST_WIDE_INT startb, endb;
4667 #endif
4668 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4669
4670 bitlength_rtx = expand_expr (bitlength,
4671 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4672
4673 	  /* Handle non-range tuple element like [ expr ].  */
4674 if (startbit == NULL_TREE)
4675 {
4676 startbit = save_expr (endbit);
4677 endbit = startbit;
4678 }
4679
4680 startbit = convert (sizetype, startbit);
4681 endbit = convert (sizetype, endbit);
4682 if (! integer_zerop (domain_min))
4683 {
4684 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4685 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4686 }
4687 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4688 EXPAND_CONST_ADDRESS);
4689 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4690 EXPAND_CONST_ADDRESS);
4691
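	  /* The library calls below operate on memory, so if the set lives
	     in a register, work on a stack temporary and copy the result
	     back afterwards.  */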
4692 if (REG_P (target))
4693 {
4694 targetx = assign_stack_temp (GET_MODE (target),
4695 GET_MODE_SIZE (GET_MODE (target)),
4696 0);
4697 emit_move_insn (targetx, target);
4698 }
4699
4700 else if (GET_CODE (target) == MEM)
4701 targetx = target;
4702 else
4703 abort ();
4704
4705 #ifdef TARGET_MEM_FUNCTIONS
4706 /* Optimization: If startbit and endbit are
4707 constants divisible by BITS_PER_UNIT,
4708 call memset instead. */
4709 if (TREE_CODE (startbit) == INTEGER_CST
4710 && TREE_CODE (endbit) == INTEGER_CST
4711 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4712 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4713 {
4714 emit_library_call (memset_libfunc, 0,
4715 VOIDmode, 3,
4716 plus_constant (XEXP (targetx, 0),
4717 startb / BITS_PER_UNIT),
4718 Pmode,
4719 constm1_rtx, TYPE_MODE (integer_type_node),
4720 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4721 TYPE_MODE (sizetype));
4722 }
4723 else
4724 #endif
4725 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4726 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4727 bitlength_rtx, TYPE_MODE (sizetype),
4728 startbit_rtx, TYPE_MODE (sizetype),
4729 endbit_rtx, TYPE_MODE (sizetype));
4730
4731 if (REG_P (target))
4732 emit_move_insn (target, targetx);
4733 }
4734 }
4735
4736 else
4737 abort ();
4738 }
4739
4740 /* Store the value of EXP (an expression tree)
4741 into a subfield of TARGET which has mode MODE and occupies
4742 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4743 If MODE is VOIDmode, it means that we are storing into a bit-field.
4744
4745 If VALUE_MODE is VOIDmode, return nothing in particular.
4746 UNSIGNEDP is not used in this case.
4747
4748 Otherwise, return an rtx for the value stored. This rtx
4749 has mode VALUE_MODE if that is convenient to do.
4750 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4751
4752 ALIGN is the alignment that TARGET is known to have.
4753 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4754
4755 ALIAS_SET is the alias set for the destination. This value will
4756 (in general) be different from that for TARGET, since TARGET is a
4757 reference to the containing structure. */
4758
4759 static rtx
4760 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4761 unsignedp, align, total_size, alias_set)
4762 rtx target;
4763 HOST_WIDE_INT bitsize;
4764 HOST_WIDE_INT bitpos;
4765 enum machine_mode mode;
4766 tree exp;
4767 enum machine_mode value_mode;
4768 int unsignedp;
4769 unsigned int align;
4770 HOST_WIDE_INT total_size;
4771 int alias_set;
4772 {
4773 HOST_WIDE_INT width_mask = 0;
4774
4775 if (TREE_CODE (exp) == ERROR_MARK)
4776 return const0_rtx;
4777
4778 if (bitsize < HOST_BITS_PER_WIDE_INT)
4779 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4780
4781 /* If we are storing into an unaligned field of an aligned union that is
4782 in a register, we may have the mode of TARGET being an integer mode but
4783 MODE == BLKmode. In that case, get an aligned object whose size and
4784 alignment are the same as TARGET and store TARGET into it (we can avoid
4785 the store if the field being stored is the entire width of TARGET). Then
4786 call ourselves recursively to store the field into a BLKmode version of
4787 that object. Finally, load from the object into TARGET. This is not
4788 very efficient in general, but should only be slightly more expensive
4789 than the otherwise-required unaligned accesses. Perhaps this can be
4790 cleaned up later. */
4791
4792 if (mode == BLKmode
4793 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4794 {
4795 rtx object = assign_stack_temp (GET_MODE (target),
4796 GET_MODE_SIZE (GET_MODE (target)), 0);
4797 rtx blk_object = copy_rtx (object);
4798
4799 MEM_SET_IN_STRUCT_P (object, 1);
4800 MEM_SET_IN_STRUCT_P (blk_object, 1);
4801 PUT_MODE (blk_object, BLKmode);
4802
4803 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4804 emit_move_insn (object, target);
4805
4806 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4807 align, total_size, alias_set);
4808
4809 /* Even though we aren't returning target, we need to
4810 give it the updated value. */
4811 emit_move_insn (target, object);
4812
4813 return blk_object;
4814 }
4815
4816 if (GET_CODE (target) == CONCAT)
4817 {
4818 /* We're storing into a struct containing a single __complex. */
4819
4820 if (bitpos != 0)
4821 abort ();
4822 return store_expr (exp, target, 0);
4823 }
4824
4825 /* If the structure is in a register or if the component
4826 is a bit field, we cannot use addressing to access it.
4827 Use bit-field techniques or SUBREG to store in it. */
4828
4829 if (mode == VOIDmode
4830 || (mode != BLKmode && ! direct_store[(int) mode]
4831 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4832 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4833 || GET_CODE (target) == REG
4834 || GET_CODE (target) == SUBREG
4835 /* If the field isn't aligned enough to store as an ordinary memref,
4836 store it as a bit field. */
4837 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4838 && (align < GET_MODE_ALIGNMENT (mode)
4839 || bitpos % GET_MODE_ALIGNMENT (mode)))
4840 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4841 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4842 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4843 /* If the RHS and field are a constant size and the size of the
4844 RHS isn't the same size as the bitfield, we must use bitfield
4845 operations. */
4846 || (bitsize >= 0
4847 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4848 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4849 {
4850 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4851
4852 /* If BITSIZE is narrower than the size of the type of EXP
4853 we will be narrowing TEMP. Normally, what's wanted are the
4854 low-order bits. However, if EXP's type is a record and this is
4855 big-endian machine, we want the upper BITSIZE bits. */
4856 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4857 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4858 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4859 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4860 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4861 - bitsize),
4862 temp, 1);
4863
4864 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4865 MODE. */
4866 if (mode != VOIDmode && mode != BLKmode
4867 && mode != TYPE_MODE (TREE_TYPE (exp)))
4868 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4869
4870 /* If the modes of TARGET and TEMP are both BLKmode, both
4871 must be in memory and BITPOS must be aligned on a byte
4872 boundary. If so, we simply do a block copy. */
4873 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4874 {
4875 unsigned int exp_align = expr_align (exp);
4876
4877 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4878 || bitpos % BITS_PER_UNIT != 0)
4879 abort ();
4880
4881 target = change_address (target, VOIDmode,
4882 plus_constant (XEXP (target, 0),
4883 bitpos / BITS_PER_UNIT));
4884
4885 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4886 align = MIN (exp_align, align);
4887
4888 /* Find an alignment that is consistent with the bit position. */
4889 while ((bitpos % align) != 0)
4890 align >>= 1;
4891
4892 emit_block_move (target, temp,
4893 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4894 / BITS_PER_UNIT),
4895 align);
4896
4897 return value_mode == VOIDmode ? const0_rtx : target;
4898 }
4899
4900 /* Store the value in the bitfield. */
4901 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4902 if (value_mode != VOIDmode)
4903 {
4904 /* The caller wants an rtx for the value. */
4905 /* If possible, avoid refetching from the bitfield itself. */
4906 if (width_mask != 0
4907 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4908 {
4909 tree count;
4910 enum machine_mode tmode;
4911
4912 if (unsignedp)
4913 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4914 tmode = GET_MODE (temp);
4915 if (tmode == VOIDmode)
4916 tmode = value_mode;
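	      /* For a signed field, sign-extend from BITSIZE bits by
		 shifting up to the top of TMODE and back down with an
		 arithmetic shift.  */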
4917 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4918 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4919 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4920 }
4921 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4922 NULL_RTX, value_mode, 0, align,
4923 total_size);
4924 }
4925 return const0_rtx;
4926 }
4927 else
4928 {
4929 rtx addr = XEXP (target, 0);
4930 rtx to_rtx;
4931
4932 /* If a value is wanted, it must be the lhs;
4933 so make the address stable for multiple use. */
4934
4935 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4936 && ! CONSTANT_ADDRESS_P (addr)
4937 /* A frame-pointer reference is already stable. */
4938 && ! (GET_CODE (addr) == PLUS
4939 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4940 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4941 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4942 addr = copy_to_reg (addr);
4943
4944 /* Now build a reference to just the desired component. */
4945
4946 to_rtx = copy_rtx (change_address (target, mode,
4947 plus_constant (addr,
4948 (bitpos
4949 / BITS_PER_UNIT))));
4950 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4951 MEM_ALIAS_SET (to_rtx) = alias_set;
4952
4953 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4954 }
4955 }
4956 \f
4957 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4958 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4959 ARRAY_REFs and find the ultimate containing object, which we return.
4960
4961 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4962 bit position, and *PUNSIGNEDP to the signedness of the field.
4963 If the position of the field is variable, we store a tree
4964 giving the variable offset (in units) in *POFFSET.
4965 This offset is in addition to the bit position.
4966 If the position is not variable, we store 0 in *POFFSET.
4967 We set *PALIGNMENT to the alignment of the address that will be
4968 computed. This is the alignment of the thing we return if *POFFSET
4969    is zero, but can be less strictly aligned if *POFFSET is nonzero.
4970
4971 If any of the extraction expressions is volatile,
4972 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4973
4974 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4975 is a mode that can be used to access the field. In that case, *PBITSIZE
4976 is redundant.
4977
4978 If the field describes a variable-sized object, *PMODE is set to
4979 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4980 this case, but the address of the object can be found. */
4981
4982 tree
4983 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4984 punsignedp, pvolatilep, palignment)
4985 tree exp;
4986 HOST_WIDE_INT *pbitsize;
4987 HOST_WIDE_INT *pbitpos;
4988 tree *poffset;
4989 enum machine_mode *pmode;
4990 int *punsignedp;
4991 int *pvolatilep;
4992 unsigned int *palignment;
4993 {
4994 tree size_tree = 0;
4995 enum machine_mode mode = VOIDmode;
4996 tree offset = size_zero_node;
4997 tree bit_offset = bitsize_zero_node;
4998 unsigned int alignment = BIGGEST_ALIGNMENT;
4999 tree tem;
5000
5001 /* First get the mode, signedness, and size. We do this from just the
5002 outermost expression. */
5003 if (TREE_CODE (exp) == COMPONENT_REF)
5004 {
5005 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5006 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5007 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5008
5009 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5010 }
5011 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5012 {
5013 size_tree = TREE_OPERAND (exp, 1);
5014 *punsignedp = TREE_UNSIGNED (exp);
5015 }
5016 else
5017 {
5018 mode = TYPE_MODE (TREE_TYPE (exp));
5019 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5020
5021 if (mode == BLKmode)
5022 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5023 else
5024 *pbitsize = GET_MODE_BITSIZE (mode);
5025 }
5026
5027 if (size_tree != 0)
5028 {
5029 if (! host_integerp (size_tree, 1))
5030 mode = BLKmode, *pbitsize = -1;
5031 else
5032 *pbitsize = tree_low_cst (size_tree, 1);
5033 }
5034
5035 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5036 and find the ultimate containing object. */
5037 while (1)
5038 {
5039 if (TREE_CODE (exp) == BIT_FIELD_REF)
5040 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5041 else if (TREE_CODE (exp) == COMPONENT_REF)
5042 {
5043 tree field = TREE_OPERAND (exp, 1);
5044 tree this_offset = DECL_FIELD_OFFSET (field);
5045
5046 /* If this field hasn't been filled in yet, don't go
5047 past it. This should only happen when folding expressions
5048 made during type construction. */
5049 if (this_offset == 0)
5050 break;
5051 else if (! TREE_CONSTANT (this_offset)
5052 && contains_placeholder_p (this_offset))
5053 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5054
5055 offset = size_binop (PLUS_EXPR, offset, this_offset);
5056 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5057 DECL_FIELD_BIT_OFFSET (field));
5058
5059 if (! host_integerp (offset, 0))
5060 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5061 }
5062
5063 else if (TREE_CODE (exp) == ARRAY_REF)
5064 {
5065 tree index = TREE_OPERAND (exp, 1);
5066 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5067 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5068 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5069
5070 /* We assume all arrays have sizes that are a multiple of a byte.
5071 First subtract the lower bound, if any, in the type of the
5072 index, then convert to sizetype and multiply by the size of the
5073 array element. */
5074 if (low_bound != 0 && ! integer_zerop (low_bound))
5075 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5076 index, low_bound));
5077
5078 /* If the index has a self-referential type, pass it to a
5079 	     WITH_RECORD_EXPR; if the component size is self-referential,
5080 	     pass our component to one.  */
5081 if (! TREE_CONSTANT (index)
5082 && contains_placeholder_p (index))
5083 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5084 if (! TREE_CONSTANT (unit_size)
5085 && contains_placeholder_p (unit_size))
5086 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5087 TREE_OPERAND (exp, 0));
5088
5089 offset = size_binop (PLUS_EXPR, offset,
5090 size_binop (MULT_EXPR,
5091 convert (sizetype, index),
5092 unit_size));
5093 }
5094
5095 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5096 && ! ((TREE_CODE (exp) == NOP_EXPR
5097 || TREE_CODE (exp) == CONVERT_EXPR)
5098 && (TYPE_MODE (TREE_TYPE (exp))
5099 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5100 break;
5101
5102 /* If any reference in the chain is volatile, the effect is volatile. */
5103 if (TREE_THIS_VOLATILE (exp))
5104 *pvolatilep = 1;
5105
5106 /* If the offset is non-constant already, then we can't assume any
5107 alignment more than the alignment here. */
5108 if (! TREE_CONSTANT (offset))
5109 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5110
5111 exp = TREE_OPERAND (exp, 0);
5112 }
5113
5114 if (DECL_P (exp))
5115 alignment = MIN (alignment, DECL_ALIGN (exp));
5116 else if (TREE_TYPE (exp) != 0)
5117 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5118
5119 /* If OFFSET is constant, see if we can return the whole thing as a
5120 constant bit position. Otherwise, split it up. */
5121 if (host_integerp (offset, 0)
5122 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5123 bitsize_unit_node))
5124 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5125 && host_integerp (tem, 0))
5126 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5127 else
5128 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5129
5130 *pmode = mode;
5131 *palignment = alignment;
5132 return exp;
5133 }
5134
5135 /* Subroutine of expand_expr: compute memory_usage from modifier.  */
5136
5137 static enum memory_use_mode
5138 get_memory_usage_from_modifier (modifier)
5139 enum expand_modifier modifier;
5140 {
5141 switch (modifier)
5142 {
5143 case EXPAND_NORMAL:
5144 case EXPAND_SUM:
5145 return MEMORY_USE_RO;
5146 break;
5147 case EXPAND_MEMORY_USE_WO:
5148 return MEMORY_USE_WO;
5149 break;
5150 case EXPAND_MEMORY_USE_RW:
5151 return MEMORY_USE_RW;
5152 break;
5153 case EXPAND_MEMORY_USE_DONT:
5154 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5155 MEMORY_USE_DONT, because they are modifiers to a call of
5156 expand_expr in the ADDR_EXPR case of expand_expr. */
5157 case EXPAND_CONST_ADDRESS:
5158 case EXPAND_INITIALIZER:
5159 return MEMORY_USE_DONT;
5160 case EXPAND_MEMORY_USE_BAD:
5161 default:
5162 abort ();
5163 }
5164 }
5165 \f
5166 /* Given an rtx VALUE that may contain additions and multiplications,
5167 return an equivalent value that just refers to a register or memory.
5168 This is done by generating instructions to perform the arithmetic
5169 and returning a pseudo-register containing the value.
5170
5171 The returned value may be a REG, SUBREG, MEM or constant. */
5172
5173 rtx
5174 force_operand (value, target)
5175 rtx value, target;
5176 {
5177 register optab binoptab = 0;
5178 /* Use a temporary to force order of execution of calls to
5179 `force_operand'. */
5180 rtx tmp;
5181 register rtx op2;
5182 /* Use subtarget as the target for operand 0 of a binary operation. */
5183 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5184
5185 /* Check for a PIC address load. */
5186 if (flag_pic
5187 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5188 && XEXP (value, 0) == pic_offset_table_rtx
5189 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5190 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5191 || GET_CODE (XEXP (value, 1)) == CONST))
5192 {
5193 if (!subtarget)
5194 subtarget = gen_reg_rtx (GET_MODE (value));
5195 emit_move_insn (subtarget, value);
5196 return subtarget;
5197 }
5198
5199 if (GET_CODE (value) == PLUS)
5200 binoptab = add_optab;
5201 else if (GET_CODE (value) == MINUS)
5202 binoptab = sub_optab;
5203 else if (GET_CODE (value) == MULT)
5204 {
5205 op2 = XEXP (value, 1);
5206 if (!CONSTANT_P (op2)
5207 && !(GET_CODE (op2) == REG && op2 != subtarget))
5208 subtarget = 0;
5209 tmp = force_operand (XEXP (value, 0), subtarget);
5210 return expand_mult (GET_MODE (value), tmp,
5211 force_operand (op2, NULL_RTX),
5212 target, 0);
5213 }
5214
5215 if (binoptab)
5216 {
5217 op2 = XEXP (value, 1);
5218 if (!CONSTANT_P (op2)
5219 && !(GET_CODE (op2) == REG && op2 != subtarget))
5220 subtarget = 0;
5221 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5222 {
5223 binoptab = add_optab;
5224 op2 = negate_rtx (GET_MODE (value), op2);
5225 }
5226
5227 /* Check for an addition with OP2 a constant integer and our first
5228 operand a PLUS of a virtual register and something else. In that
5229 case, we want to emit the sum of the virtual register and the
5230 constant first and then add the other value. This allows virtual
5231 register instantiation to simply modify the constant rather than
5232 creating another one around this addition. */
5233 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5234 && GET_CODE (XEXP (value, 0)) == PLUS
5235 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5236 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5237 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5238 {
5239 rtx temp = expand_binop (GET_MODE (value), binoptab,
5240 XEXP (XEXP (value, 0), 0), op2,
5241 subtarget, 0, OPTAB_LIB_WIDEN);
5242 return expand_binop (GET_MODE (value), binoptab, temp,
5243 force_operand (XEXP (XEXP (value, 0), 1), 0),
5244 target, 0, OPTAB_LIB_WIDEN);
5245 }
5246
5247 tmp = force_operand (XEXP (value, 0), subtarget);
5248 return expand_binop (GET_MODE (value), binoptab, tmp,
5249 force_operand (op2, NULL_RTX),
5250 target, 0, OPTAB_LIB_WIDEN);
5251 /* We give UNSIGNEDP = 0 to expand_binop
5252 because the only operations we are expanding here are signed ones. */
5253 }
5254 return value;
5255 }
5256 \f
5257 /* Subroutine of expand_expr:
5258 save the non-copied parts (LIST) of an expr (LHS), and return a list
5259 which can restore these values to their previous values,
5260 should something modify their storage. */
5261
5262 static tree
5263 save_noncopied_parts (lhs, list)
5264 tree lhs;
5265 tree list;
5266 {
5267 tree tail;
5268 tree parts = 0;
5269
5270 for (tail = list; tail; tail = TREE_CHAIN (tail))
5271 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5272 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5273 else
5274 {
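	/* Save this part: build a reference to it, make a temporary of the
	   same type, and copy the part's current value into it.  */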
5275 tree part = TREE_VALUE (tail);
5276 tree part_type = TREE_TYPE (part);
5277 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5278 rtx target = assign_temp (part_type, 0, 1, 1);
5279 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5280 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5281 parts = tree_cons (to_be_saved,
5282 build (RTL_EXPR, part_type, NULL_TREE,
5283 (tree) target),
5284 parts);
5285 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5286 }
5287 return parts;
5288 }
5289
5290 /* Subroutine of expand_expr:
5291 record the non-copied parts (LIST) of an expr (LHS), and return a list
5292 which specifies the initial values of these parts. */
5293
5294 static tree
5295 init_noncopied_parts (lhs, list)
5296 tree lhs;
5297 tree list;
5298 {
5299 tree tail;
5300 tree parts = 0;
5301
5302 for (tail = list; tail; tail = TREE_CHAIN (tail))
5303 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5304 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5305 else if (TREE_PURPOSE (tail))
5306 {
5307 tree part = TREE_VALUE (tail);
5308 tree part_type = TREE_TYPE (part);
5309 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5310 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5311 }
5312 return parts;
5313 }
5314
5315 /* Subroutine of expand_expr: return nonzero iff there is no way that
5316 EXP can reference X, which is being modified. TOP_P is nonzero if this
5317 call is going to be used to determine whether we need a temporary
5318 for EXP, as opposed to a recursive call to this function.
5319
5320 It is always safe for this routine to return zero since it merely
5321 searches for optimization opportunities. */
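/* For illustration, a typical caller uses this predicate to decide
   whether an existing TARGET may be reused while expanding EXP,
   falling back to a fresh pseudo when a conflict cannot be ruled out:

	if (target == 0 || ! safe_from_p (target, exp, 1))
	  target = gen_reg_rtx (mode);

   Since returning zero is always safe, the only cost of a false
   negative is a missed reuse of TARGET.  */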
5322
5323 static int
5324 safe_from_p (x, exp, top_p)
5325 rtx x;
5326 tree exp;
5327 int top_p;
5328 {
5329 rtx exp_rtl = 0;
5330 int i, nops;
5331 static int save_expr_count;
5332 static int save_expr_size = 0;
5333 static tree *save_expr_rewritten;
5334 static tree save_expr_trees[256];
5335
5336 if (x == 0
5337 /* If EXP has varying size, we MUST use a target since we currently
5338 have no way of allocating temporaries of variable size
5339 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5340 So we assume here that something at a higher level has prevented a
5341 clash. This is somewhat bogus, but the best we can do. Only
5342 do this when X is BLKmode and when we are at the top level. */
5343 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5344 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5345 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5346 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5347 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5348 != INTEGER_CST)
5349 && GET_MODE (x) == BLKmode))
5350 return 1;
5351
5352 if (top_p && save_expr_size == 0)
5353 {
5354 int rtn;
5355
5356 save_expr_count = 0;
5357 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5358 save_expr_rewritten = &save_expr_trees[0];
5359
5360 rtn = safe_from_p (x, exp, 1);
5361
5362 for (i = 0; i < save_expr_count; ++i)
5363 {
5364 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5365 abort ();
5366 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5367 }
5368
5369 save_expr_size = 0;
5370
5371 return rtn;
5372 }
5373
5374 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5375 find the underlying pseudo. */
5376 if (GET_CODE (x) == SUBREG)
5377 {
5378 x = SUBREG_REG (x);
5379 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5380 return 0;
5381 }
5382
5383 /* If X is a location in the outgoing argument area, it is always safe. */
5384 if (GET_CODE (x) == MEM
5385 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5386 || (GET_CODE (XEXP (x, 0)) == PLUS
5387 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5388 return 1;
5389
5390 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5391 {
5392 case 'd':
5393 exp_rtl = DECL_RTL (exp);
5394 break;
5395
5396 case 'c':
5397 return 1;
5398
5399 case 'x':
5400 if (TREE_CODE (exp) == TREE_LIST)
5401 return ((TREE_VALUE (exp) == 0
5402 || safe_from_p (x, TREE_VALUE (exp), 0))
5403 && (TREE_CHAIN (exp) == 0
5404 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5405 else if (TREE_CODE (exp) == ERROR_MARK)
5406 return 1; /* An already-visited SAVE_EXPR? */
5407 else
5408 return 0;
5409
5410 case '1':
5411 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5412
5413 case '2':
5414 case '<':
5415 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5416 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5417
5418 case 'e':
5419 case 'r':
5420 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5421 the expression. If it is set, we conflict iff we are that rtx or
5422 both are in memory. Otherwise, we check all operands of the
5423 expression recursively. */
5424
5425 switch (TREE_CODE (exp))
5426 {
5427 case ADDR_EXPR:
5428 return (staticp (TREE_OPERAND (exp, 0))
5429 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5430 || TREE_STATIC (exp));
5431
5432 case INDIRECT_REF:
5433 if (GET_CODE (x) == MEM)
5434 return 0;
5435 break;
5436
5437 case CALL_EXPR:
5438 exp_rtl = CALL_EXPR_RTL (exp);
5439 if (exp_rtl == 0)
5440 {
5441 /* Assume that the call will clobber all hard registers and
5442 all of memory. */
5443 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5444 || GET_CODE (x) == MEM)
5445 return 0;
5446 }
5447
5448 break;
5449
5450 case RTL_EXPR:
5451 /* If a sequence exists, we would have to scan every instruction
5452 in the sequence to see if it was safe. This is probably not
5453 worthwhile. */
5454 if (RTL_EXPR_SEQUENCE (exp))
5455 return 0;
5456
5457 exp_rtl = RTL_EXPR_RTL (exp);
5458 break;
5459
5460 case WITH_CLEANUP_EXPR:
5461 exp_rtl = RTL_EXPR_RTL (exp);
5462 break;
5463
5464 case CLEANUP_POINT_EXPR:
5465 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5466
5467 case SAVE_EXPR:
5468 exp_rtl = SAVE_EXPR_RTL (exp);
5469 if (exp_rtl)
5470 break;
5471
5472 /* This SAVE_EXPR might appear many times in the top-level
5473 safe_from_p() expression, and if it has a complex
5474 subexpression, examining it multiple times could result
5475 in a combinatorial explosion. E.g. on an Alpha
5476 running at least 200MHz, a Fortran test case compiled with
5477 optimization took about 28 minutes to compile -- even though
5478 it was only a few lines long, and the complicated line causing
5479 so much time to be spent in the earlier version of safe_from_p()
5480 had only 293 or so unique nodes.
5481
5482 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5483 where it is so we can turn it back in the top-level safe_from_p()
5484 when we're done. */
5485
5486 /* For now, don't bother re-sizing the array. */
5487 if (save_expr_count >= save_expr_size)
5488 return 0;
5489 save_expr_rewritten[save_expr_count++] = exp;
5490
5491 nops = tree_code_length[(int) SAVE_EXPR];
5492 for (i = 0; i < nops; i++)
5493 {
5494 tree operand = TREE_OPERAND (exp, i);
5495 if (operand == NULL_TREE)
5496 continue;
5497 TREE_SET_CODE (exp, ERROR_MARK);
5498 if (!safe_from_p (x, operand, 0))
5499 return 0;
5500 TREE_SET_CODE (exp, SAVE_EXPR);
5501 }
5502 TREE_SET_CODE (exp, ERROR_MARK);
5503 return 1;
5504
5505 case BIND_EXPR:
5506 /* The only operand we look at is operand 1. The rest aren't
5507 part of the expression. */
5508 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5509
5510 case METHOD_CALL_EXPR:
5511 /* This takes an rtx argument, but shouldn't appear here. */
5512 abort ();
5513
5514 default:
5515 break;
5516 }
5517
5518 /* If we have an rtx, we do not need to scan our operands. */
5519 if (exp_rtl)
5520 break;
5521
5522 nops = tree_code_length[(int) TREE_CODE (exp)];
5523 for (i = 0; i < nops; i++)
5524 if (TREE_OPERAND (exp, i) != 0
5525 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5526 return 0;
5527 }
5528
5529 /* If we have an rtl, find any enclosed object. Then see if we conflict
5530 with it. */
5531 if (exp_rtl)
5532 {
5533 if (GET_CODE (exp_rtl) == SUBREG)
5534 {
5535 exp_rtl = SUBREG_REG (exp_rtl);
5536 if (GET_CODE (exp_rtl) == REG
5537 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5538 return 0;
5539 }
5540
5541 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5542 are memory and EXP is not readonly. */
5543 return ! (rtx_equal_p (x, exp_rtl)
5544 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5545 && ! TREE_READONLY (exp)));
5546 }
5547
5548 /* If we reach here, it is safe. */
5549 return 1;
5550 }
5551
5552 /* Subroutine of expand_expr: return nonzero iff EXP is an
5553 expression whose type is statically determinable. */
5554
5555 static int
5556 fixed_type_p (exp)
5557 tree exp;
5558 {
5559 if (TREE_CODE (exp) == PARM_DECL
5560 || TREE_CODE (exp) == VAR_DECL
5561 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5562 || TREE_CODE (exp) == COMPONENT_REF
5563 || TREE_CODE (exp) == ARRAY_REF)
5564 return 1;
5565 return 0;
5566 }
5567
5568 /* Subroutine of expand_expr: return rtx if EXP is a
5569 variable or parameter; else return 0. */
5570
5571 static rtx
5572 var_rtx (exp)
5573 tree exp;
5574 {
5575 STRIP_NOPS (exp);
5576 switch (TREE_CODE (exp))
5577 {
5578 case PARM_DECL:
5579 case VAR_DECL:
5580 return DECL_RTL (exp);
5581 default:
5582 return 0;
5583 }
5584 }
5585
5586 #ifdef MAX_INTEGER_COMPUTATION_MODE
5587 void
5588 check_max_integer_computation_mode (exp)
5589 tree exp;
5590 {
5591 enum tree_code code;
5592 enum machine_mode mode;
5593
5594 /* Strip any NOPs that don't change the mode. */
5595 STRIP_NOPS (exp);
5596 code = TREE_CODE (exp);
5597
5598 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5599 if (code == NOP_EXPR
5600 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5601 return;
5602
5603 /* First check the type of the overall operation. We need only look at
5604 unary, binary and relational operations. */
5605 if (TREE_CODE_CLASS (code) == '1'
5606 || TREE_CODE_CLASS (code) == '2'
5607 || TREE_CODE_CLASS (code) == '<')
5608 {
5609 mode = TYPE_MODE (TREE_TYPE (exp));
5610 if (GET_MODE_CLASS (mode) == MODE_INT
5611 && mode > MAX_INTEGER_COMPUTATION_MODE)
5612 fatal ("unsupported wide integer operation");
5613 }
5614
5615 /* Check operand of a unary op. */
5616 if (TREE_CODE_CLASS (code) == '1')
5617 {
5618 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5619 if (GET_MODE_CLASS (mode) == MODE_INT
5620 && mode > MAX_INTEGER_COMPUTATION_MODE)
5621 fatal ("unsupported wide integer operation");
5622 }
5623
5624 /* Check operands of a binary/comparison op. */
5625 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5626 {
5627 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5628 if (GET_MODE_CLASS (mode) == MODE_INT
5629 && mode > MAX_INTEGER_COMPUTATION_MODE)
5630 fatal ("unsupported wide integer operation");
5631
5632 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5633 if (GET_MODE_CLASS (mode) == MODE_INT
5634 && mode > MAX_INTEGER_COMPUTATION_MODE)
5635 fatal ("unsupported wide integer operation");
5636 }
5637 }
5638 #endif
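/* For illustration, on a target that defines
   MAX_INTEGER_COMPUTATION_MODE as DImode, the checks above allow SImode
   and DImode arithmetic but reject a TImode addition with the
   "unsupported wide integer operation" error.  */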
5639
5640 \f
5641 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5642 has any readonly fields. If any of the fields have types that
5643 contain readonly fields, return true as well. */
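/* For illustration, with C declarations such as

	struct inner { const int id; };
	struct outer { struct inner i; int x; };

   this returns nonzero for `struct inner' (a FIELD_DECL with
   TREE_READONLY set) and, via the recursive case, for `struct outer'
   as well.  */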
5644
5645 static int
5646 readonly_fields_p (type)
5647 tree type;
5648 {
5649 tree field;
5650
5651 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5652 if (TREE_CODE (field) == FIELD_DECL
5653 && (TREE_READONLY (field)
5654 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5655 && readonly_fields_p (TREE_TYPE (field)))))
5656 return 1;
5657
5658 return 0;
5659 }
5660 \f
5661 /* expand_expr: generate code for computing expression EXP.
5662 An rtx for the computed value is returned. The value is never null.
5663 In the case of a void EXP, const0_rtx is returned.
5664
5665 The value may be stored in TARGET if TARGET is nonzero.
5666 TARGET is just a suggestion; callers must assume that
5667 the rtx returned may not be the same as TARGET.
5668
5669 If TARGET is CONST0_RTX, it means that the value will be ignored.
5670
5671 If TMODE is not VOIDmode, it suggests generating the
5672 result in mode TMODE. But this is done only when convenient.
5673 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5674 TMODE is just a suggestion; callers must assume that
5675 the rtx returned may not have mode TMODE.
5676
5677 Note that TARGET may have neither TMODE nor MODE. In that case, it
5678 probably will not be used.
5679
5680 If MODIFIER is EXPAND_SUM then when EXP is an addition
5681 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5682 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5683 products as above, or REG or MEM, or constant.
5684 Ordinarily in such cases we would output mul or add instructions
5685 and then return a pseudo reg containing the sum.
5686
5687 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5688 it also marks a label as absolutely required (it can't be dead).
5689 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5690 This is used for outputting expressions used in initializers.
5691
5692 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5693 with a constant address even if that address is not normally legitimate.
5694 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
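/* For illustration, a caller that just wants the value of EXP, with no
   preferred target or mode, can write

	rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   while the address arithmetic in this file often passes EXPAND_SUM so
   that a (plus ...) may be returned rather than a sum forced into a
   pseudo register.  */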
5695
5696 rtx
5697 expand_expr (exp, target, tmode, modifier)
5698 register tree exp;
5699 rtx target;
5700 enum machine_mode tmode;
5701 enum expand_modifier modifier;
5702 {
5703 register rtx op0, op1, temp;
5704 tree type = TREE_TYPE (exp);
5705 int unsignedp = TREE_UNSIGNED (type);
5706 register enum machine_mode mode;
5707 register enum tree_code code = TREE_CODE (exp);
5708 optab this_optab;
5709 rtx subtarget, original_target;
5710 int ignore;
5711 tree context;
5712 /* Used by check-memory-usage to make modifier read only. */
5713 enum expand_modifier ro_modifier;
5714
5715 /* Handle ERROR_MARK before anybody tries to access its type. */
5716 if (TREE_CODE (exp) == ERROR_MARK)
5717 {
5718 op0 = CONST0_RTX (tmode);
5719 if (op0 != 0)
5720 return op0;
5721 return const0_rtx;
5722 }
5723
5724 mode = TYPE_MODE (type);
5725 /* Use subtarget as the target for operand 0 of a binary operation. */
5726 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5727 original_target = target;
5728 ignore = (target == const0_rtx
5729 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5730 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5731 || code == COND_EXPR)
5732 && TREE_CODE (type) == VOID_TYPE));
5733
5734 /* Make a read-only version of the modifier. */
5735 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5736 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5737 ro_modifier = modifier;
5738 else
5739 ro_modifier = EXPAND_NORMAL;
5740
5741 /* Don't use hard regs as subtargets, because the combiner
5742 can only handle pseudo regs. */
5743 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5744 subtarget = 0;
5745 /* Avoid subtargets inside loops,
5746 since they hide some invariant expressions. */
5747 if (preserve_subexpressions_p ())
5748 subtarget = 0;
5749
5750 /* If we are going to ignore this result, we need only do something
5751 if there is a side-effect somewhere in the expression. If there
5752 is, short-circuit the most common cases here. Note that we must
5753 not call expand_expr with anything but const0_rtx in case this
5754 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5755
5756 if (ignore)
5757 {
5758 if (! TREE_SIDE_EFFECTS (exp))
5759 return const0_rtx;
5760
5761 /* Ensure we reference a volatile object even if value is ignored, but
5762 don't do this if all we are doing is taking its address. */
5763 if (TREE_THIS_VOLATILE (exp)
5764 && TREE_CODE (exp) != FUNCTION_DECL
5765 && mode != VOIDmode && mode != BLKmode
5766 && modifier != EXPAND_CONST_ADDRESS)
5767 {
5768 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5769 if (GET_CODE (temp) == MEM)
5770 temp = copy_to_reg (temp);
5771 return const0_rtx;
5772 }
5773
5774 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5775 || code == INDIRECT_REF || code == BUFFER_REF)
5776 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5777 VOIDmode, ro_modifier);
5778 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5779 || code == ARRAY_REF)
5780 {
5781 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5782 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5783 return const0_rtx;
5784 }
5785 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5786 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5787 /* If the second operand has no side effects, just evaluate
5788 the first. */
5789 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5790 VOIDmode, ro_modifier);
5791 else if (code == BIT_FIELD_REF)
5792 {
5793 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5794 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5795 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5796 return const0_rtx;
5797 }
5798 ;
5799 target = 0;
5800 }
5801
5802 #ifdef MAX_INTEGER_COMPUTATION_MODE
5803 /* Only check stuff here if the mode we want is different from the mode
5804 of the expression; if it's the same, check_max_integer_computation_mode
5805 will handle it. Do we really need to check this stuff at all? */
5806
5807 if (target
5808 && GET_MODE (target) != mode
5809 && TREE_CODE (exp) != INTEGER_CST
5810 && TREE_CODE (exp) != PARM_DECL
5811 && TREE_CODE (exp) != ARRAY_REF
5812 && TREE_CODE (exp) != COMPONENT_REF
5813 && TREE_CODE (exp) != BIT_FIELD_REF
5814 && TREE_CODE (exp) != INDIRECT_REF
5815 && TREE_CODE (exp) != CALL_EXPR
5816 && TREE_CODE (exp) != VAR_DECL
5817 && TREE_CODE (exp) != RTL_EXPR)
5818 {
5819 enum machine_mode mode = GET_MODE (target);
5820
5821 if (GET_MODE_CLASS (mode) == MODE_INT
5822 && mode > MAX_INTEGER_COMPUTATION_MODE)
5823 fatal ("unsupported wide integer operation");
5824 }
5825
5826 if (tmode != mode
5827 && TREE_CODE (exp) != INTEGER_CST
5828 && TREE_CODE (exp) != PARM_DECL
5829 && TREE_CODE (exp) != ARRAY_REF
5830 && TREE_CODE (exp) != COMPONENT_REF
5831 && TREE_CODE (exp) != BIT_FIELD_REF
5832 && TREE_CODE (exp) != INDIRECT_REF
5833 && TREE_CODE (exp) != VAR_DECL
5834 && TREE_CODE (exp) != CALL_EXPR
5835 && TREE_CODE (exp) != RTL_EXPR
5836 && GET_MODE_CLASS (tmode) == MODE_INT
5837 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5838 fatal ("unsupported wide integer operation");
5839
5840 check_max_integer_computation_mode (exp);
5841 #endif
5842
5843 /* If we will do cse, generate all results into pseudo registers
5844 since 1) that allows cse to find more things
5845 and 2) otherwise cse could produce an insn the machine
5846 cannot support. */
5847
5848 if (! cse_not_expected && mode != BLKmode && target
5849 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5850 target = subtarget;
5851
5852 switch (code)
5853 {
5854 case LABEL_DECL:
5855 {
5856 tree function = decl_function_context (exp);
5857 /* Handle using a label in a containing function. */
5858 if (function != current_function_decl
5859 && function != inline_function_decl && function != 0)
5860 {
5861 struct function *p = find_function_data (function);
5862 /* Allocate in the memory associated with the function
5863 that the label is in. */
5864 push_obstacks (p->function_obstack,
5865 p->function_maybepermanent_obstack);
5866
5867 p->expr->x_forced_labels
5868 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5869 p->expr->x_forced_labels);
5870 pop_obstacks ();
5871 }
5872 else
5873 {
5874 if (modifier == EXPAND_INITIALIZER)
5875 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5876 label_rtx (exp),
5877 forced_labels);
5878 }
5879
5880 temp = gen_rtx_MEM (FUNCTION_MODE,
5881 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5882 if (function != current_function_decl
5883 && function != inline_function_decl && function != 0)
5884 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5885 return temp;
5886 }
5887
5888 case PARM_DECL:
5889 if (DECL_RTL (exp) == 0)
5890 {
5891 error_with_decl (exp, "prior parameter's size depends on `%s'");
5892 return CONST0_RTX (mode);
5893 }
5894
5895 /* ... fall through ... */
5896
5897 case VAR_DECL:
5898 /* If a static var's type was incomplete when the decl was written,
5899 but the type is complete now, lay out the decl now. */
5900 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5901 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5902 {
5903 push_obstacks_nochange ();
5904 end_temporary_allocation ();
5905 layout_decl (exp, 0);
5906 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5907 pop_obstacks ();
5908 }
5909
5910 /* Although static-storage variables start off initialized, according to
5911 ANSI C, a memcpy could overwrite them with uninitialized values. So
5912 we check them too. This also lets us check for read-only variables
5913 accessed via a non-const declaration, in case it won't be detected
5914 any other way (e.g., in an embedded system or OS kernel without
5915 memory protection).
5916
5917 Aggregates are not checked here; they're handled elsewhere. */
5918 if (cfun && current_function_check_memory_usage
5919 && code == VAR_DECL
5920 && GET_CODE (DECL_RTL (exp)) == MEM
5921 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5922 {
5923 enum memory_use_mode memory_usage;
5924 memory_usage = get_memory_usage_from_modifier (modifier);
5925
5926 if (memory_usage != MEMORY_USE_DONT)
5927 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5928 XEXP (DECL_RTL (exp), 0), Pmode,
5929 GEN_INT (int_size_in_bytes (type)),
5930 TYPE_MODE (sizetype),
5931 GEN_INT (memory_usage),
5932 TYPE_MODE (integer_type_node));
5933 }
5934
5935 /* ... fall through ... */
5936
5937 case FUNCTION_DECL:
5938 case RESULT_DECL:
5939 if (DECL_RTL (exp) == 0)
5940 abort ();
5941
5942 /* Ensure the variable is marked as used even if it doesn't go through
5943 a parser. If it hasn't been used yet, write out an external
5944 definition. */
5945 if (! TREE_USED (exp))
5946 {
5947 assemble_external (exp);
5948 TREE_USED (exp) = 1;
5949 }
5950
5951 /* Show we haven't gotten RTL for this yet. */
5952 temp = 0;
5953
5954 /* Handle variables inherited from containing functions. */
5955 context = decl_function_context (exp);
5956
5957 /* We treat inline_function_decl as an alias for the current function
5958 because that is the inline function whose vars, types, etc.
5959 are being merged into the current function.
5960 See expand_inline_function. */
5961
5962 if (context != 0 && context != current_function_decl
5963 && context != inline_function_decl
5964 /* If var is static, we don't need a static chain to access it. */
5965 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5966 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5967 {
5968 rtx addr;
5969
5970 /* Mark as non-local and addressable. */
5971 DECL_NONLOCAL (exp) = 1;
5972 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5973 abort ();
5974 mark_addressable (exp);
5975 if (GET_CODE (DECL_RTL (exp)) != MEM)
5976 abort ();
5977 addr = XEXP (DECL_RTL (exp), 0);
5978 if (GET_CODE (addr) == MEM)
5979 addr = gen_rtx_MEM (Pmode,
5980 fix_lexical_addr (XEXP (addr, 0), exp));
5981 else
5982 addr = fix_lexical_addr (addr, exp);
5983 temp = change_address (DECL_RTL (exp), mode, addr);
5984 }
5985
5986 /* This is the case of an array whose size is to be determined
5987 from its initializer, while the initializer is still being parsed.
5988 See expand_decl. */
5989
5990 else if (GET_CODE (DECL_RTL (exp)) == MEM
5991 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5992 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5993 XEXP (DECL_RTL (exp), 0));
5994
5995 /* If DECL_RTL is memory, we are in the normal case and either
5996 the address is not valid or it is not a register and -fforce-addr
5997 is specified, get the address into a register. */
5998
5999 else if (GET_CODE (DECL_RTL (exp)) == MEM
6000 && modifier != EXPAND_CONST_ADDRESS
6001 && modifier != EXPAND_SUM
6002 && modifier != EXPAND_INITIALIZER
6003 && (! memory_address_p (DECL_MODE (exp),
6004 XEXP (DECL_RTL (exp), 0))
6005 || (flag_force_addr
6006 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6007 temp = change_address (DECL_RTL (exp), VOIDmode,
6008 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6009
6010 /* If we got something, return it. But first, set the alignment
6011 if the address is a register. */
6012 if (temp != 0)
6013 {
6014 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6015 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6016
6017 return temp;
6018 }
6019
6020 /* If the mode of DECL_RTL does not match that of the decl, it
6021 must be a promoted value. We return a SUBREG of the wanted mode,
6022 but mark it so that we know that it was already extended. */
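/* For illustration, on a target whose PROMOTE_MODE widens QImode
   variables to SImode, a `char' variable may live in an SImode pseudo;
   the code below then returns something like (subreg:QI (reg:SI 70) 0),
   where 70 stands for an arbitrary pseudo, with SUBREG_PROMOTED_VAR_P
   set so later users know the value has already been extended.  */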
6023
6024 if (GET_CODE (DECL_RTL (exp)) == REG
6025 && GET_MODE (DECL_RTL (exp)) != mode)
6026 {
6027 /* Get the signedness used for this variable. Ensure we get the
6028 same mode we got when the variable was declared. */
6029 if (GET_MODE (DECL_RTL (exp))
6030 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6031 abort ();
6032
6033 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6034 SUBREG_PROMOTED_VAR_P (temp) = 1;
6035 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6036 return temp;
6037 }
6038
6039 return DECL_RTL (exp);
6040
6041 case INTEGER_CST:
6042 return immed_double_const (TREE_INT_CST_LOW (exp),
6043 TREE_INT_CST_HIGH (exp), mode);
6044
6045 case CONST_DECL:
6046 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6047 EXPAND_MEMORY_USE_BAD);
6048
6049 case REAL_CST:
6050 /* If optimized, generate immediate CONST_DOUBLE
6051 which will be turned into memory by reload if necessary.
6052
6053 We used to force a register so that loop.c could see it. But
6054 this does not allow gen_* patterns to perform optimizations with
6055 the constants. It also produces two insns in cases like "x = 1.0;".
6056 On most machines, floating-point constants are not permitted in
6057 many insns, so we'd end up copying it to a register in any case.
6058
6059 Now, we do the copying in expand_binop, if appropriate. */
6060 return immed_real_const (exp);
6061
6062 case COMPLEX_CST:
6063 case STRING_CST:
6064 if (! TREE_CST_RTL (exp))
6065 output_constant_def (exp);
6066
6067 /* TREE_CST_RTL probably contains a constant address.
6068 On RISC machines where a constant address isn't valid,
6069 make some insns to get that address into a register. */
6070 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6071 && modifier != EXPAND_CONST_ADDRESS
6072 && modifier != EXPAND_INITIALIZER
6073 && modifier != EXPAND_SUM
6074 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6075 || (flag_force_addr
6076 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6077 return change_address (TREE_CST_RTL (exp), VOIDmode,
6078 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6079 return TREE_CST_RTL (exp);
6080
6081 case EXPR_WITH_FILE_LOCATION:
6082 {
6083 rtx to_return;
6084 char *saved_input_filename = input_filename;
6085 int saved_lineno = lineno;
6086 input_filename = EXPR_WFL_FILENAME (exp);
6087 lineno = EXPR_WFL_LINENO (exp);
6088 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6089 emit_line_note (input_filename, lineno);
6090 /* Possibly avoid switching back and forth here. */
6091 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6092 input_filename = saved_input_filename;
6093 lineno = saved_lineno;
6094 return to_return;
6095 }
6096
6097 case SAVE_EXPR:
6098 context = decl_function_context (exp);
6099
6100 /* If this SAVE_EXPR was at global context, assume we are an
6101 initialization function and move it into our context. */
6102 if (context == 0)
6103 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6104
6105 /* We treat inline_function_decl as an alias for the current function
6106 because that is the inline function whose vars, types, etc.
6107 are being merged into the current function.
6108 See expand_inline_function. */
6109 if (context == current_function_decl || context == inline_function_decl)
6110 context = 0;
6111
6112 /* If this is non-local, handle it. */
6113 if (context)
6114 {
6115 /* The following call just exists to abort if the context is
6116 not of a containing function. */
6117 find_function_data (context);
6118
6119 temp = SAVE_EXPR_RTL (exp);
6120 if (temp && GET_CODE (temp) == REG)
6121 {
6122 put_var_into_stack (exp);
6123 temp = SAVE_EXPR_RTL (exp);
6124 }
6125 if (temp == 0 || GET_CODE (temp) != MEM)
6126 abort ();
6127 return change_address (temp, mode,
6128 fix_lexical_addr (XEXP (temp, 0), exp));
6129 }
6130 if (SAVE_EXPR_RTL (exp) == 0)
6131 {
6132 if (mode == VOIDmode)
6133 temp = const0_rtx;
6134 else
6135 temp = assign_temp (type, 3, 0, 0);
6136
6137 SAVE_EXPR_RTL (exp) = temp;
6138 if (!optimize && GET_CODE (temp) == REG)
6139 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6140 save_expr_regs);
6141
6142 /* If the mode of TEMP does not match that of the expression, it
6143 must be a promoted value. We pass store_expr a SUBREG of the
6144 wanted mode but mark it so that we know that it was already
6145 extended. Note that `unsignedp' was modified above in
6146 this case. */
6147
6148 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6149 {
6150 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6151 SUBREG_PROMOTED_VAR_P (temp) = 1;
6152 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6153 }
6154
6155 if (temp == const0_rtx)
6156 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6157 EXPAND_MEMORY_USE_BAD);
6158 else
6159 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6160
6161 TREE_USED (exp) = 1;
6162 }
6163
6164 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6165 must be a promoted value. We return a SUBREG of the wanted mode,
6166 but mark it so that we know that it was already extended. */
6167
6168 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6169 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6170 {
6171 /* Compute the signedness and make the proper SUBREG. */
6172 promote_mode (type, mode, &unsignedp, 0);
6173 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6174 SUBREG_PROMOTED_VAR_P (temp) = 1;
6175 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6176 return temp;
6177 }
6178
6179 return SAVE_EXPR_RTL (exp);
6180
6181 case UNSAVE_EXPR:
6182 {
6183 rtx temp;
6184 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6185 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6186 return temp;
6187 }
6188
6189 case PLACEHOLDER_EXPR:
6190 {
6191 tree placeholder_expr;
6192
6193 /* If there is an object on the head of the placeholder list,
6194 see if any object in it is of type TYPE or is a pointer to it. For
6195 further information, see tree.def. */
6196 for (placeholder_expr = placeholder_list;
6197 placeholder_expr != 0;
6198 placeholder_expr = TREE_CHAIN (placeholder_expr))
6199 {
6200 tree need_type = TYPE_MAIN_VARIANT (type);
6201 tree object = 0;
6202 tree old_list = placeholder_list;
6203 tree elt;
6204
6205 /* Find the outermost reference that is of the type we want.
6206 If none, see if any object has a type that is a pointer to
6207 the type we want. */
6208 for (elt = TREE_PURPOSE (placeholder_expr);
6209 elt != 0 && object == 0;
6210 elt
6211 = ((TREE_CODE (elt) == COMPOUND_EXPR
6212 || TREE_CODE (elt) == COND_EXPR)
6213 ? TREE_OPERAND (elt, 1)
6214 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6215 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6216 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6217 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6218 ? TREE_OPERAND (elt, 0) : 0))
6219 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6220 object = elt;
6221
6222 for (elt = TREE_PURPOSE (placeholder_expr);
6223 elt != 0 && object == 0;
6224 elt
6225 = ((TREE_CODE (elt) == COMPOUND_EXPR
6226 || TREE_CODE (elt) == COND_EXPR)
6227 ? TREE_OPERAND (elt, 1)
6228 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6229 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6230 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6231 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6232 ? TREE_OPERAND (elt, 0) : 0))
6233 if (POINTER_TYPE_P (TREE_TYPE (elt))
6234 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6235 == need_type))
6236 object = build1 (INDIRECT_REF, need_type, elt);
6237
6238 if (object != 0)
6239 {
6240 /* Expand this object, skipping the list entries before
6241 the one in which it was found, in case it is also a
6242 PLACEHOLDER_EXPR. In that case, we want to translate it
6243 using subsequent entries. */
6244 placeholder_list = TREE_CHAIN (placeholder_expr);
6245 temp = expand_expr (object, original_target, tmode,
6246 ro_modifier);
6247 placeholder_list = old_list;
6248 return temp;
6249 }
6250 }
6251 }
6252
6253 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6254 abort ();
6255
6256 case WITH_RECORD_EXPR:
6257 /* Put the object on the placeholder list, expand our first operand,
6258 and pop the list. */
6259 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6260 placeholder_list);
6261 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6262 tmode, ro_modifier);
6263 placeholder_list = TREE_CHAIN (placeholder_list);
6264 return target;
6265
6266 case GOTO_EXPR:
6267 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6268 expand_goto (TREE_OPERAND (exp, 0));
6269 else
6270 expand_computed_goto (TREE_OPERAND (exp, 0));
6271 return const0_rtx;
6272
6273 case EXIT_EXPR:
6274 expand_exit_loop_if_false (NULL_PTR,
6275 invert_truthvalue (TREE_OPERAND (exp, 0)));
6276 return const0_rtx;
6277
6278 case LABELED_BLOCK_EXPR:
6279 if (LABELED_BLOCK_BODY (exp))
6280 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6281 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6282 return const0_rtx;
6283
6284 case EXIT_BLOCK_EXPR:
6285 if (EXIT_BLOCK_RETURN (exp))
6286 sorry ("returned value in block_exit_expr");
6287 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6288 return const0_rtx;
6289
6290 case LOOP_EXPR:
6291 push_temp_slots ();
6292 expand_start_loop (1);
6293 expand_expr_stmt (TREE_OPERAND (exp, 0));
6294 expand_end_loop ();
6295 pop_temp_slots ();
6296
6297 return const0_rtx;
6298
6299 case BIND_EXPR:
6300 {
6301 tree vars = TREE_OPERAND (exp, 0);
6302 int vars_need_expansion = 0;
6303
6304 /* Need to open a binding contour here because
6305 if there are any cleanups they must be contained here. */
6306 expand_start_bindings (2);
6307
6308 /* Mark the corresponding BLOCK for output in its proper place. */
6309 if (TREE_OPERAND (exp, 2) != 0
6310 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6311 insert_block (TREE_OPERAND (exp, 2));
6312
6313 /* If VARS have not yet been expanded, expand them now. */
6314 while (vars)
6315 {
6316 if (DECL_RTL (vars) == 0)
6317 {
6318 vars_need_expansion = 1;
6319 expand_decl (vars);
6320 }
6321 expand_decl_init (vars);
6322 vars = TREE_CHAIN (vars);
6323 }
6324
6325 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6326
6327 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6328
6329 return temp;
6330 }
6331
6332 case RTL_EXPR:
6333 if (RTL_EXPR_SEQUENCE (exp))
6334 {
6335 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6336 abort ();
6337 emit_insns (RTL_EXPR_SEQUENCE (exp));
6338 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6339 }
6340 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6341 free_temps_for_rtl_expr (exp);
6342 return RTL_EXPR_RTL (exp);
6343
6344 case CONSTRUCTOR:
6345 /* If we don't need the result, just ensure we evaluate any
6346 subexpressions. */
6347 if (ignore)
6348 {
6349 tree elt;
6350 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6351 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6352 EXPAND_MEMORY_USE_BAD);
6353 return const0_rtx;
6354 }
6355
6356 /* All elts simple constants => refer to a constant in memory. But
6357 if this is a non-BLKmode mode, let it store a field at a time
6358 since that should make a CONST_INT or CONST_DOUBLE when we
6359 fold. Likewise, if we have a target we can use, it is best to
6360 store directly into the target unless the type is large enough
6361 that memcpy will be used. If we are making an initializer and
6362 all operands are constant, put it in memory as well. */
6363 else if ((TREE_STATIC (exp)
6364 && ((mode == BLKmode
6365 && ! (target != 0 && safe_from_p (target, exp, 1)))
6366 || TREE_ADDRESSABLE (exp)
6367 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6368 && (! MOVE_BY_PIECES_P
6369 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6370 TYPE_ALIGN (type)))
6371 && ! mostly_zeros_p (exp))))
6372 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6373 {
6374 rtx constructor = output_constant_def (exp);
6375
6376 if (modifier != EXPAND_CONST_ADDRESS
6377 && modifier != EXPAND_INITIALIZER
6378 && modifier != EXPAND_SUM
6379 && (! memory_address_p (GET_MODE (constructor),
6380 XEXP (constructor, 0))
6381 || (flag_force_addr
6382 && GET_CODE (XEXP (constructor, 0)) != REG)))
6383 constructor = change_address (constructor, VOIDmode,
6384 XEXP (constructor, 0));
6385 return constructor;
6386 }
6387
6388 else
6389 {
6390 /* Handle calls that pass values in multiple non-contiguous
6391 locations. The Irix 6 ABI has examples of this. */
6392 if (target == 0 || ! safe_from_p (target, exp, 1)
6393 || GET_CODE (target) == PARALLEL)
6394 {
6395 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6396 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6397 else
6398 target = assign_temp (type, 0, 1, 1);
6399 }
6400
6401 if (TREE_READONLY (exp))
6402 {
6403 if (GET_CODE (target) == MEM)
6404 target = copy_rtx (target);
6405
6406 RTX_UNCHANGING_P (target) = 1;
6407 }
6408
6409 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6410 int_size_in_bytes (TREE_TYPE (exp)));
6411 return target;
6412 }
6413
6414 case INDIRECT_REF:
6415 {
6416 tree exp1 = TREE_OPERAND (exp, 0);
6417 tree exp2;
6418 tree index;
6419 tree string = string_constant (exp1, &index);
6420
6421 /* Try to optimize reads from const strings. */
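/* For illustration, a read such as *("abc" + 1) passes these tests and
   is returned directly as GEN_INT ('b'), with no memory reference
   emitted.  */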
6422 if (string
6423 && TREE_CODE (string) == STRING_CST
6424 && TREE_CODE (index) == INTEGER_CST
6425 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6426 && GET_MODE_CLASS (mode) == MODE_INT
6427 && GET_MODE_SIZE (mode) == 1
6428 && modifier != EXPAND_MEMORY_USE_WO)
6429 return
6430 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6431
6432 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6433 op0 = memory_address (mode, op0);
6434
6435 if (cfun && current_function_check_memory_usage
6436 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6437 {
6438 enum memory_use_mode memory_usage;
6439 memory_usage = get_memory_usage_from_modifier (modifier);
6440
6441 if (memory_usage != MEMORY_USE_DONT)
6442 {
6443 in_check_memory_usage = 1;
6444 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6445 op0, Pmode,
6446 GEN_INT (int_size_in_bytes (type)),
6447 TYPE_MODE (sizetype),
6448 GEN_INT (memory_usage),
6449 TYPE_MODE (integer_type_node));
6450 in_check_memory_usage = 0;
6451 }
6452 }
6453
6454 temp = gen_rtx_MEM (mode, op0);
6455 /* If address was computed by addition,
6456 mark this as an element of an aggregate. */
6457 if (TREE_CODE (exp1) == PLUS_EXPR
6458 || (TREE_CODE (exp1) == SAVE_EXPR
6459 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6460 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6461 || (TREE_CODE (exp1) == ADDR_EXPR
6462 && (exp2 = TREE_OPERAND (exp1, 0))
6463 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6464 MEM_SET_IN_STRUCT_P (temp, 1);
6465
6466 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6467 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6468
6469 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6470 here, because, in C and C++, the fact that a location is accessed
6471 through a pointer to const does not mean that the value there can
6472 never change. Languages where it can never change should
6473 also set TREE_STATIC. */
6474 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6475
6476 /* If we are writing to this object and its type is a record with
6477 readonly fields, we must mark it as readonly so it will
6478 conflict with readonly references to those fields. */
6479 if (modifier == EXPAND_MEMORY_USE_WO
6480 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6481 RTX_UNCHANGING_P (temp) = 1;
6482
6483 return temp;
6484 }
6485
6486 case ARRAY_REF:
6487 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6488 abort ();
6489
6490 {
6491 tree array = TREE_OPERAND (exp, 0);
6492 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6493 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6494 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6495 HOST_WIDE_INT i;
6496
6497 /* Optimize the special-case of a zero lower bound.
6498
6499 We convert the low_bound to sizetype to avoid some problems
6500 with constant folding. (E.g. suppose the lower bound is 1,
6501 and its mode is QI. Without the conversion, (ARRAY
6502 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6503 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6504
6505 if (! integer_zerop (low_bound))
6506 index = size_diffop (index, convert (sizetype, low_bound));
6507
6508 /* Fold an expression like: "foo"[2].
6509 This is not done in fold so it won't happen inside &.
6510 Don't fold if this is for wide characters since it's too
6511 difficult to do correctly and this is a very rare case. */
6512
6513 if (TREE_CODE (array) == STRING_CST
6514 && TREE_CODE (index) == INTEGER_CST
6515 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6516 && GET_MODE_CLASS (mode) == MODE_INT
6517 && GET_MODE_SIZE (mode) == 1)
6518 return
6519 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6520
6521 /* If this is a constant index into a constant array,
6522 just get the value from the array. Handle both the cases when
6523 we have an explicit constructor and when our operand is a variable
6524 that was declared const. */
6525
6526 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6527 && TREE_CODE (index) == INTEGER_CST
6528 && 0 > compare_tree_int (index,
6529 list_length (CONSTRUCTOR_ELTS
6530 (TREE_OPERAND (exp, 0)))))
6531 {
6532 tree elem;
6533
6534 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6535 i = TREE_INT_CST_LOW (index);
6536 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6537 ;
6538
6539 if (elem)
6540 return expand_expr (fold (TREE_VALUE (elem)), target,
6541 tmode, ro_modifier);
6542 }
6543
6544 else if (optimize >= 1
6545 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6546 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6547 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6548 {
6549 if (TREE_CODE (index) == INTEGER_CST)
6550 {
6551 tree init = DECL_INITIAL (array);
6552
6553 if (TREE_CODE (init) == CONSTRUCTOR)
6554 {
6555 tree elem;
6556
6557 for (elem = CONSTRUCTOR_ELTS (init);
6558 (elem
6559 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6560 elem = TREE_CHAIN (elem))
6561 ;
6562
6563 if (elem)
6564 return expand_expr (fold (TREE_VALUE (elem)), target,
6565 tmode, ro_modifier);
6566 }
6567 else if (TREE_CODE (init) == STRING_CST
6568 && 0 > compare_tree_int (index,
6569 TREE_STRING_LENGTH (init)))
6570 return (GEN_INT
6571 (TREE_STRING_POINTER
6572 (init)[TREE_INT_CST_LOW (index)]));
6573 }
6574 }
6575 }
6576
6577 /* ... fall through ... */
6578
6579 case COMPONENT_REF:
6580 case BIT_FIELD_REF:
6581 /* If the operand is a CONSTRUCTOR, we can just extract the
6582 appropriate field if it is present. Don't do this if we have
6583 already written the data since we want to refer to that copy
6584 and varasm.c assumes that's what we'll do. */
6585 if (code != ARRAY_REF
6586 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6587 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6588 {
6589 tree elt;
6590
6591 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6592 elt = TREE_CHAIN (elt))
6593 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6594 /* We can normally use the value of the field in the
6595 CONSTRUCTOR. However, if this is a bitfield in
6596 an integral mode that we can fit in a HOST_WIDE_INT,
6597 we must mask only the number of bits in the bitfield,
6598 since this is done implicitly by the constructor. If
6599 the bitfield does not meet either of those conditions,
6600 we can't do this optimization. */
6601 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6602 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6603 == MODE_INT)
6604 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6605 <= HOST_BITS_PER_WIDE_INT))))
6606 {
6607 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6608 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6609 {
6610 HOST_WIDE_INT bitsize
6611 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6612
6613 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6614 {
6615 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6616 op0 = expand_and (op0, op1, target);
6617 }
6618 else
6619 {
6620 enum machine_mode imode
6621 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6622 tree count
6623 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6624 0);
6625
6626 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6627 target, 0);
6628 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6629 target, 0);
6630 }
6631 }
6632
6633 return op0;
6634 }
6635 }
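/* For illustration: for an unsigned bitfield declared `unsigned f : 3;'
   the code above masks the expanded value with
   ((HOST_WIDE_INT) 1 << 3) - 1 == 7, while for a signed 3-bit field it
   shifts left and then arithmetically back right by
   GET_MODE_BITSIZE (imode) - 3 bits, so the field's top bit is
   propagated as the sign.  */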
6636
6637 {
6638 enum machine_mode mode1;
6639 HOST_WIDE_INT bitsize, bitpos;
6640 tree offset;
6641 int volatilep = 0;
6642 unsigned int alignment;
6643 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6644 &mode1, &unsignedp, &volatilep,
6645 &alignment);
6646
6647 /* If we got back the original object, something is wrong. Perhaps
6648 we are evaluating an expression too early. In any event, don't
6649 infinitely recurse. */
6650 if (tem == exp)
6651 abort ();
6652
6653 /* If TEM's type is a union of variable size, pass TARGET to the inner
6654 computation, since it will need a temporary and TARGET is known
6655 to be adequate for that. This occurs in unchecked conversion in Ada. */
6656
6657 op0 = expand_expr (tem,
6658 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6659 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6660 != INTEGER_CST)
6661 ? target : NULL_RTX),
6662 VOIDmode,
6663 (modifier == EXPAND_INITIALIZER
6664 || modifier == EXPAND_CONST_ADDRESS)
6665 ? modifier : EXPAND_NORMAL);
6666
6667 /* If this is a constant, put it into a register if it is a
6668 legitimate constant and OFFSET is 0 and memory if it isn't. */
6669 if (CONSTANT_P (op0))
6670 {
6671 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6672 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6673 && offset == 0)
6674 op0 = force_reg (mode, op0);
6675 else
6676 op0 = validize_mem (force_const_mem (mode, op0));
6677 }
6678
6679 if (offset != 0)
6680 {
6681 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6682
6683 /* If this object is in memory, put it into a register.
6684 This case can't occur in C, but can in Ada if we have
6685 unchecked conversion of an expression from a scalar type to
6686 an array or record type. */
6687 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6688 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6689 {
6690 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6691
6692 mark_temp_addr_taken (memloc);
6693 emit_move_insn (memloc, op0);
6694 op0 = memloc;
6695 }
6696
6697 if (GET_CODE (op0) != MEM)
6698 abort ();
6699
6700 if (GET_MODE (offset_rtx) != ptr_mode)
6701 {
6702 #ifdef POINTERS_EXTEND_UNSIGNED
6703 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6704 #else
6705 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6706 #endif
6707 }
6708
6709 /* A constant address in OP0 can have VOIDmode; we must not try
6710 to call force_reg in that case, so exclude it here. */
6711 if (GET_CODE (op0) == MEM
6712 && GET_MODE (op0) == BLKmode
6713 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6714 && bitsize != 0
6715 && (bitpos % bitsize) == 0
6716 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6717 && alignment == GET_MODE_ALIGNMENT (mode1))
6718 {
6719 rtx temp = change_address (op0, mode1,
6720 plus_constant (XEXP (op0, 0),
6721 (bitpos /
6722 BITS_PER_UNIT)));
6723 if (GET_CODE (XEXP (temp, 0)) == REG)
6724 op0 = temp;
6725 else
6726 op0 = change_address (op0, mode1,
6727 force_reg (GET_MODE (XEXP (temp, 0)),
6728 XEXP (temp, 0)));
6729 bitpos = 0;
6730 }
6731
6733 op0 = change_address (op0, VOIDmode,
6734 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6735 force_reg (ptr_mode,
6736 offset_rtx)));
6737 }
6738
6739 /* Don't forget about volatility even if this is a bitfield. */
6740 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6741 {
6742 op0 = copy_rtx (op0);
6743 MEM_VOLATILE_P (op0) = 1;
6744 }
6745
6746 /* Check the access. */
6747 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6748 {
6749 enum memory_use_mode memory_usage;
6750 memory_usage = get_memory_usage_from_modifier (modifier);
6751
6752 if (memory_usage != MEMORY_USE_DONT)
6753 {
6754 rtx to;
6755 int size;
6756
6757 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6758 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6759
6760 /* Check the access right of the pointer. */
6761 if (size > BITS_PER_UNIT)
6762 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6763 to, Pmode,
6764 GEN_INT (size / BITS_PER_UNIT),
6765 TYPE_MODE (sizetype),
6766 GEN_INT (memory_usage),
6767 TYPE_MODE (integer_type_node));
6768 }
6769 }
6770
6771 /* In cases where an aligned union has an unaligned object
6772 as a field, we might be extracting a BLKmode value from
6773 an integer-mode (e.g., SImode) object. Handle this case
6774 by doing the extract into an object as wide as the field
6775 (which we know to be the width of a basic mode), then
6776 storing into memory, and changing the mode to BLKmode.
6777 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6778 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6779 if (mode1 == VOIDmode
6780 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6781 || (modifier != EXPAND_CONST_ADDRESS
6782 && modifier != EXPAND_INITIALIZER
6783 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6784 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6785 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6786 /* If the field isn't aligned enough to fetch as a memref,
6787 fetch it as a bit field. */
6788 || (mode1 != BLKmode
6789 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6790 && ((TYPE_ALIGN (TREE_TYPE (tem))
6791 < GET_MODE_ALIGNMENT (mode))
6792 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6793 /* If the type and the field are a constant size and the
6794 size of the type isn't the same size as the bitfield,
6795 we must use bitfield operations. */
6796 || ((bitsize >= 0
6797 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6798 == INTEGER_CST)
6799 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6800 bitsize)))))
6801 || (modifier != EXPAND_CONST_ADDRESS
6802 && modifier != EXPAND_INITIALIZER
6803 && mode == BLKmode
6804 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6805 && (TYPE_ALIGN (type) > alignment
6806 || bitpos % TYPE_ALIGN (type) != 0)))
6807 {
6808 enum machine_mode ext_mode = mode;
6809
6810 if (ext_mode == BLKmode
6811 && ! (target != 0 && GET_CODE (op0) == MEM
6812 && GET_CODE (target) == MEM
6813 && bitpos % BITS_PER_UNIT == 0))
6814 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6815
6816 if (ext_mode == BLKmode)
6817 {
6818 /* In this case, BITPOS must start at a byte boundary and
6819 TARGET, if specified, must be a MEM. */
6820 if (GET_CODE (op0) != MEM
6821 || (target != 0 && GET_CODE (target) != MEM)
6822 || bitpos % BITS_PER_UNIT != 0)
6823 abort ();
6824
6825 op0 = change_address (op0, VOIDmode,
6826 plus_constant (XEXP (op0, 0),
6827 bitpos / BITS_PER_UNIT));
6828 if (target == 0)
6829 target = assign_temp (type, 0, 1, 1);
6830
6831 emit_block_move (target, op0,
6832 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6833 / BITS_PER_UNIT),
6834 BITS_PER_UNIT);
6835
6836 return target;
6837 }
6838
6839 op0 = validize_mem (op0);
6840
6841 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6842 mark_reg_pointer (XEXP (op0, 0), alignment);
6843
6844 op0 = extract_bit_field (op0, bitsize, bitpos,
6845 unsignedp, target, ext_mode, ext_mode,
6846 alignment,
6847 int_size_in_bytes (TREE_TYPE (tem)));
6848
6849 /* If the result is a record type and BITSIZE is narrower than
6850 the mode of OP0, an integral mode, and this is a big endian
6851 machine, we must put the field into the high-order bits. */
6852 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6853 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6854 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6855 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6856 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6857 - bitsize),
6858 op0, 1);
6859
6860 if (mode == BLKmode)
6861 {
6862 rtx new = assign_stack_temp (ext_mode,
6863 bitsize / BITS_PER_UNIT, 0);
6864
6865 emit_move_insn (new, op0);
6866 op0 = copy_rtx (new);
6867 PUT_MODE (op0, BLKmode);
6868 MEM_SET_IN_STRUCT_P (op0, 1);
6869 }
6870
6871 return op0;
6872 }
6873
6874 /* If the result is BLKmode, use that to access the object
6875 now as well. */
6876 if (mode == BLKmode)
6877 mode1 = BLKmode;
6878
6879 /* Get a reference to just this component. */
6880 if (modifier == EXPAND_CONST_ADDRESS
6881 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6882 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6883 (bitpos / BITS_PER_UNIT)));
6884 else
6885 op0 = change_address (op0, mode1,
6886 plus_constant (XEXP (op0, 0),
6887 (bitpos / BITS_PER_UNIT)));
6888
6889 if (GET_CODE (op0) == MEM)
6890 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6891
6892 if (GET_CODE (XEXP (op0, 0)) == REG)
6893 mark_reg_pointer (XEXP (op0, 0), alignment);
6894
6895 MEM_SET_IN_STRUCT_P (op0, 1);
6896 MEM_VOLATILE_P (op0) |= volatilep;
6897 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6898 || modifier == EXPAND_CONST_ADDRESS
6899 || modifier == EXPAND_INITIALIZER)
6900 return op0;
6901 else if (target == 0)
6902 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6903
6904 convert_move (target, op0, unsignedp);
6905 return target;
6906 }
6907
6908 /* Intended for a reference to a buffer of a file-object in Pascal.
6909 But it's not certain that a special tree code will really be
6910 necessary for these. INDIRECT_REF might work for them. */
6911 case BUFFER_REF:
6912 abort ();
6913
6914 case IN_EXPR:
6915 {
6916 /* Pascal set IN expression.
6917
6918 Algorithm:
6919 rlo = set_low - (set_low%bits_per_word);
6920 the_word = set [ (index - rlo)/bits_per_word ];
6921 bit_index = index % bits_per_word;
6922 bitmask = 1 << bit_index;
6923 return !!(the_word & bitmask); */
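/* Worked example (illustrative, assuming bits_per_word == 8): with
   set_low == 3 and index == 13, rlo = 3 - (3 % 8) = 0, the_word =
   set[(13 - 0) / 8] = set[1], bit_index = 13 % 8 = 5, and
   bitmask = 1 << 5 = 0x20, so the result is nonzero exactly when
   bit 5 of the set's second byte is set.  */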
6924
6925 tree set = TREE_OPERAND (exp, 0);
6926 tree index = TREE_OPERAND (exp, 1);
6927 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6928 tree set_type = TREE_TYPE (set);
6929 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6930 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6931 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6932 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6933 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6934 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6935 rtx setaddr = XEXP (setval, 0);
6936 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6937 rtx rlow;
6938 rtx diff, quo, rem, addr, bit, result;
6939
6940 preexpand_calls (exp);
6941
6942 /* If domain is empty, answer is no. Likewise if index is constant
6943 and out of bounds. */
6944 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6945 && TREE_CODE (set_low_bound) == INTEGER_CST
6946 && tree_int_cst_lt (set_high_bound, set_low_bound))
6947 || (TREE_CODE (index) == INTEGER_CST
6948 && TREE_CODE (set_low_bound) == INTEGER_CST
6949 && tree_int_cst_lt (index, set_low_bound))
6950 || (TREE_CODE (set_high_bound) == INTEGER_CST
6951 && TREE_CODE (index) == INTEGER_CST
6952 && tree_int_cst_lt (set_high_bound, index))))
6953 return const0_rtx;
6954
6955 if (target == 0)
6956 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6957
6958 /* If we get here, we have to generate the code for both cases
6959 (in range and out of range). */
6960
6961 op0 = gen_label_rtx ();
6962 op1 = gen_label_rtx ();
6963
6964 if (! (GET_CODE (index_val) == CONST_INT
6965 && GET_CODE (lo_r) == CONST_INT))
6966 {
6967 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6968 GET_MODE (index_val), iunsignedp, 0, op1);
6969 }
6970
6971 if (! (GET_CODE (index_val) == CONST_INT
6972 && GET_CODE (hi_r) == CONST_INT))
6973 {
6974 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6975 GET_MODE (index_val), iunsignedp, 0, op1);
6976 }
6977
6978 /* Calculate the element number of bit zero in the first word
6979 of the set. */
6980 if (GET_CODE (lo_r) == CONST_INT)
6981 rlow = GEN_INT (INTVAL (lo_r)
6982 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6983 else
6984 rlow = expand_binop (index_mode, and_optab, lo_r,
6985 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6986 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6987
6988 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6989 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6990
6991 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6992 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6993 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6994 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6995
6996 addr = memory_address (byte_mode,
6997 expand_binop (index_mode, add_optab, diff,
6998 setaddr, NULL_RTX, iunsignedp,
6999 OPTAB_LIB_WIDEN));
7000
7001 /* Extract the bit we want to examine.  */
7002 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7003 gen_rtx_MEM (byte_mode, addr),
7004 make_tree (TREE_TYPE (index), rem),
7005 NULL_RTX, 1);
7006 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7007 GET_MODE (target) == byte_mode ? target : 0,
7008 1, OPTAB_LIB_WIDEN);
7009
7010 if (result != target)
7011 convert_move (target, result, 1);
7012
7013 /* Output the code to handle the out-of-range case. */
7014 emit_jump (op0);
7015 emit_label (op1);
7016 emit_move_insn (target, const0_rtx);
7017 emit_label (op0);
7018 return target;
7019 }
7020
7021 case WITH_CLEANUP_EXPR:
7022 if (RTL_EXPR_RTL (exp) == 0)
7023 {
7024 RTL_EXPR_RTL (exp)
7025 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7026 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7027
7028 /* That's it for this cleanup. */
7029 TREE_OPERAND (exp, 2) = 0;
7030 }
7031 return RTL_EXPR_RTL (exp);
7032
7033 case CLEANUP_POINT_EXPR:
7034 {
7035 /* Start a new binding layer that will keep track of all cleanup
7036 actions to be performed. */
7037 expand_start_bindings (2);
7038
7039 target_temp_slot_level = temp_slot_level;
7040
7041 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7042 /* If we're going to use this value, load it up now. */
7043 if (! ignore)
7044 op0 = force_not_mem (op0);
7045 preserve_temp_slots (op0);
7046 expand_end_bindings (NULL_TREE, 0, 0);
7047 }
7048 return op0;
7049
7050 case CALL_EXPR:
7051 /* Check for a built-in function. */
7052 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7053 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7054 == FUNCTION_DECL)
7055 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7056 return expand_builtin (exp, target, subtarget, tmode, ignore);
7057
7058 /* If this call was expanded already by preexpand_calls,
7059 just return the result we got. */
7060 if (CALL_EXPR_RTL (exp) != 0)
7061 return CALL_EXPR_RTL (exp);
7062
7063 return expand_call (exp, target, ignore);
7064
7065 case NON_LVALUE_EXPR:
7066 case NOP_EXPR:
7067 case CONVERT_EXPR:
7068 case REFERENCE_EXPR:
7069 if (TREE_CODE (type) == UNION_TYPE)
7070 {
7071 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7072
7073 /* If both input and output are BLKmode, this conversion
7074 isn't actually doing anything unless we need to make the
7075 alignment stricter. */
7076 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7077 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7078 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7079 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7080 modifier);
7081
7082 if (target == 0)
7083 {
7084 if (mode != BLKmode)
7085 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7086 else
7087 target = assign_temp (type, 0, 1, 1);
7088 }
7089
7090 if (GET_CODE (target) == MEM)
7091 /* Store data into beginning of memory target. */
7092 store_expr (TREE_OPERAND (exp, 0),
7093 change_address (target, TYPE_MODE (valtype), 0), 0);
7094
7095 else if (GET_CODE (target) == REG)
7096 /* Store this field into a union of the proper type. */
7097 store_field (target,
7098 MIN ((int_size_in_bytes (TREE_TYPE
7099 (TREE_OPERAND (exp, 0)))
7100 * BITS_PER_UNIT),
7101 GET_MODE_BITSIZE (mode)),
7102 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7103 VOIDmode, 0, BITS_PER_UNIT,
7104 int_size_in_bytes (type), 0);
7105 else
7106 abort ();
7107
7108 /* Return the entire union. */
7109 return target;
7110 }
7111
7112 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7113 {
7114 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7115 ro_modifier);
7116
7117 /* If the signedness of the conversion differs and OP0 is
7118 a promoted SUBREG, clear that indication since we now
7119 have to do the proper extension. */
7120 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7121 && GET_CODE (op0) == SUBREG)
7122 SUBREG_PROMOTED_VAR_P (op0) = 0;
7123
7124 return op0;
7125 }
7126
7127 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7128 if (GET_MODE (op0) == mode)
7129 return op0;
7130
7131 /* If OP0 is a constant, just convert it into the proper mode. */
7132 if (CONSTANT_P (op0))
7133 return
7134 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7135 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7136
7137 if (modifier == EXPAND_INITIALIZER)
7138 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7139
7140 if (target == 0)
7141 return
7142 convert_to_mode (mode, op0,
7143 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7144 else
7145 convert_move (target, op0,
7146 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7147 return target;
7148
7149 case PLUS_EXPR:
7150 /* We come here from MINUS_EXPR when the second operand is a
7151 constant. */
7152 plus_expr:
7153 this_optab = add_optab;
7154
7155 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7156 something else, make sure we add the register to the constant and
7157 then to the other thing. This case can occur during strength
7158 reduction and doing it this way will produce better code if the
7159 frame pointer or argument pointer is eliminated.
7160
7161 fold-const.c will ensure that the constant is always in the inner
7162 PLUS_EXPR, so the only case we need to do anything about is if
7163 sp, ap, or fp is our second argument, in which case we must swap
7164 the innermost first argument and our second argument. */
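/* For illustration (a hypothetical tree): if EXP is
   (PLUS_EXPR (PLUS_EXPR X (INTEGER_CST 8)) FP), where FP is an
   RTL_EXPR holding the frame pointer, the swap below turns it into
   (PLUS_EXPR (PLUS_EXPR FP (INTEGER_CST 8)) X), so the register and
   the constant are combined first.  */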
7165
7166 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7167 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7168 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7169 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7170 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7171 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7172 {
7173 tree t = TREE_OPERAND (exp, 1);
7174
7175 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7176 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7177 }
7178
7179 /* If the result is to be ptr_mode and we are adding an integer to
7180 something, we might be forming a constant. So try to use
7181 plus_constant. If it produces a sum and we can't accept it,
7182 use force_operand. This allows P = &ARR[const] to generate
7183 efficient code on machines where a SYMBOL_REF is not a valid
7184 address.
7185
7186 If this is an EXPAND_SUM call, always return the sum. */
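/* For illustration (assuming 4-byte ints): for
       int arr[20];  int *p = &arr[5];
   the desired result is the canonical sum of the symbol for `arr' and
   the byte offset 20, rather than an explicit add into a register;
   force_operand is used only when the caller cannot accept such a
   sum.  */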
7187 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7188 || mode == ptr_mode)
7189 {
7190 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7191 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7192 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7193 {
7194 rtx constant_part;
7195
7196 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7197 EXPAND_SUM);
7198 /* Use immed_double_const to ensure that the constant is
7199 truncated according to the mode of OP1, then sign extended
7200 to a HOST_WIDE_INT. Using the constant directly can result
7201 in non-canonical RTL in a 64x32 cross compile. */
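/* For illustration: on a 64-bit host compiling for a 32-bit target,
   an SImode tree constant whose low word is 0xffffffff must become
   (const_int -1) rather than (const_int 0xffffffff);
   immed_double_const performs that truncation and sign extension.  */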
7202 constant_part
7203 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7204 (HOST_WIDE_INT) 0,
7205 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7206 op1 = plus_constant (op1, INTVAL (constant_part));
7207 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7208 op1 = force_operand (op1, target);
7209 return op1;
7210 }
7211
7212 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7213 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7214 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7215 {
7216 rtx constant_part;
7217
7218 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7219 EXPAND_SUM);
7220 if (! CONSTANT_P (op0))
7221 {
7222 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7223 VOIDmode, modifier);
7224 /* Don't go to both_summands if modifier
7225 says it's not right to return a PLUS. */
7226 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7227 goto binop2;
7228 goto both_summands;
7229 }
7230 /* Use immed_double_const to ensure that the constant is
7231 truncated according to the mode of OP0, then sign extended
7232 to a HOST_WIDE_INT. Using the constant directly can result
7233 in non-canonical RTL in a 64x32 cross compile. */
7234 constant_part
7235 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7236 (HOST_WIDE_INT) 0,
7237 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7238 op0 = plus_constant (op0, INTVAL (constant_part));
7239 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7240 op0 = force_operand (op0, target);
7241 return op0;
7242 }
7243 }
7244
7245 /* No sense saving up arithmetic to be done
7246 if it's all in the wrong mode to form part of an address.
7247 And force_operand won't know whether to sign-extend or
7248 zero-extend. */
7249 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7250 || mode != ptr_mode)
7251 goto binop;
7252
7253 preexpand_calls (exp);
7254 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7255 subtarget = 0;
7256
7257 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7258 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7259
7260 both_summands:
7261 /* Make sure any term that's a sum with a constant comes last. */
7262 if (GET_CODE (op0) == PLUS
7263 && CONSTANT_P (XEXP (op0, 1)))
7264 {
7265 temp = op0;
7266 op0 = op1;
7267 op1 = temp;
7268 }
7269 /* If adding to a sum including a constant,
7270 associate it to put the constant outside. */
7271 if (GET_CODE (op1) == PLUS
7272 && CONSTANT_P (XEXP (op1, 1)))
7273 {
7274 rtx constant_term = const0_rtx;
7275
7276 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7277 if (temp != 0)
7278 op0 = temp;
7279 /* Ensure that MULT comes first if there is one. */
7280 else if (GET_CODE (op0) == MULT)
7281 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7282 else
7283 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7284
7285 /* Let's also eliminate constants from op0 if possible. */
7286 op0 = eliminate_constant_term (op0, &constant_term);
7287
7288 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7289 their sum should be a constant. Form it into OP1, since the
7290 result we want will then be OP0 + OP1. */
7291
7292 temp = simplify_binary_operation (PLUS, mode, constant_term,
7293 XEXP (op1, 1));
7294 if (temp != 0)
7295 op1 = temp;
7296 else
7297 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7298 }
7299
7300 /* Put a constant term last and put a multiplication first. */
7301 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7302 temp = op1, op1 = op0, op0 = temp;
7303
7304 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7305 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7306
7307 case MINUS_EXPR:
7308 /* For initializers, we are allowed to return a MINUS of two
7309 symbolic constants. Here we handle all cases when both operands
7310 are constant. */
7311 /* Handle difference of two symbolic constants,
7312 for the sake of an initializer. */
7313 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7314 && really_constant_p (TREE_OPERAND (exp, 0))
7315 && really_constant_p (TREE_OPERAND (exp, 1)))
7316 {
7317 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7318 VOIDmode, ro_modifier);
7319 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7320 VOIDmode, ro_modifier);
7321
7322 /* If the last operand is a CONST_INT, use plus_constant of
7323 the negated constant. Else make the MINUS. */
7324 if (GET_CODE (op1) == CONST_INT)
7325 return plus_constant (op0, - INTVAL (op1));
7326 else
7327 return gen_rtx_MINUS (mode, op0, op1);
7328 }
7329 /* Convert A - const to A + (-const). */
7330 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7331 {
7332 tree negated = fold (build1 (NEGATE_EXPR, type,
7333 TREE_OPERAND (exp, 1)));
7334
7335 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7336 /* If we can't negate the constant in TYPE, leave it alone and
7337 expand_binop will negate it for us. We used to try to do it
7338 here in the signed version of TYPE, but that doesn't work
7339 on POINTER_TYPEs. */;
7340 else
7341 {
7342 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7343 goto plus_expr;
7344 }
7345 }
7346 this_optab = sub_optab;
7347 goto binop;
7348
7349 case MULT_EXPR:
7350 preexpand_calls (exp);
7351 /* If first operand is constant, swap them.
7352 Thus the following special case checks need only
7353 check the second operand. */
7354 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7355 {
7356 register tree t1 = TREE_OPERAND (exp, 0);
7357 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7358 TREE_OPERAND (exp, 1) = t1;
7359 }
7360
7361 /* Attempt to return something suitable for generating an
7362 indexed address, for machines that support that. */
7363
7364 if (modifier == EXPAND_SUM && mode == ptr_mode
7365 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7366 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7367 {
7368 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7369 EXPAND_SUM);
7370
7371 /* Apply distributive law if OP0 is x+c. */
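/* For illustration: if OP0 is (plus (reg) (const_int 8)) and the
   multiplier is 4, the result is
   (plus (mult (reg) (const_int 4)) (const_int 32)),
   a form usable as an indexed address on many machines.  */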
7372 if (GET_CODE (op0) == PLUS
7373 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7374 return
7375 gen_rtx_PLUS
7376 (mode,
7377 gen_rtx_MULT
7378 (mode, XEXP (op0, 0),
7379 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7380 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7381 * INTVAL (XEXP (op0, 1))));
7382
7383 if (GET_CODE (op0) != REG)
7384 op0 = force_operand (op0, NULL_RTX);
7385 if (GET_CODE (op0) != REG)
7386 op0 = copy_to_mode_reg (mode, op0);
7387
7388 return
7389 gen_rtx_MULT (mode, op0,
7390 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7391 }
7392
7393 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7394 subtarget = 0;
7395
7396 /* Check for multiplying things that have been extended
7397 from a narrower type. If this machine supports multiplying
7398 in that narrower type with a result in the desired type,
7399 do it that way, and avoid the explicit type-conversion. */
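/* For illustration (hypothetical source): for
       short a, b;  int p = a * b;
   the operands arrive as (int) a * (int) b; when the target has a
   mulhisi3-style widening multiply, the product is computed from the
   narrow HImode operands directly into an SImode result instead of
   widening each operand first.  The same applies when one operand is
   a constant that fits the narrow type.  */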
7400 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7401 && TREE_CODE (type) == INTEGER_TYPE
7402 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7403 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7404 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7405 && int_fits_type_p (TREE_OPERAND (exp, 1),
7406 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7407 /* Don't use a widening multiply if a shift will do. */
7408 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7409 > HOST_BITS_PER_WIDE_INT)
7410 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7411 ||
7412 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7413 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7414 ==
7415 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7416 /* If both operands are extended, they must either both
7417 be zero-extended or both be sign-extended. */
7418 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7419 ==
7420 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7421 {
7422 enum machine_mode innermode
7423 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7424 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7425 ? smul_widen_optab : umul_widen_optab);
7426 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7427 ? umul_widen_optab : smul_widen_optab);
7428 if (mode == GET_MODE_WIDER_MODE (innermode))
7429 {
7430 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7431 {
7432 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7433 NULL_RTX, VOIDmode, 0);
7434 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7435 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7436 VOIDmode, 0);
7437 else
7438 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7439 NULL_RTX, VOIDmode, 0);
7440 goto binop2;
7441 }
7442 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7443 && innermode == word_mode)
7444 {
7445 rtx htem;
7446 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7447 NULL_RTX, VOIDmode, 0);
7448 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7449 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7450 VOIDmode, 0);
7451 else
7452 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7453 NULL_RTX, VOIDmode, 0);
7454 temp = expand_binop (mode, other_optab, op0, op1, target,
7455 unsignedp, OPTAB_LIB_WIDEN);
7456 htem = expand_mult_highpart_adjust (innermode,
7457 gen_highpart (innermode, temp),
7458 op0, op1,
7459 gen_highpart (innermode, temp),
7460 unsignedp);
7461 emit_move_insn (gen_highpart (innermode, temp), htem);
7462 return temp;
7463 }
7464 }
7465 }
7466 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7467 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7468 return expand_mult (mode, op0, op1, target, unsignedp);
7469
7470 case TRUNC_DIV_EXPR:
7471 case FLOOR_DIV_EXPR:
7472 case CEIL_DIV_EXPR:
7473 case ROUND_DIV_EXPR:
7474 case EXACT_DIV_EXPR:
7475 preexpand_calls (exp);
7476 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7477 subtarget = 0;
7478 /* Possible optimization: compute the dividend with EXPAND_SUM
7479 then if the divisor is constant can optimize the case
7480 where some terms of the dividend have coeffs divisible by it. */
7481 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7482 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7483 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7484
7485 case RDIV_EXPR:
7486 this_optab = flodiv_optab;
7487 goto binop;
7488
7489 case TRUNC_MOD_EXPR:
7490 case FLOOR_MOD_EXPR:
7491 case CEIL_MOD_EXPR:
7492 case ROUND_MOD_EXPR:
7493 preexpand_calls (exp);
7494 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7495 subtarget = 0;
7496 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7497 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7498 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7499
7500 case FIX_ROUND_EXPR:
7501 case FIX_FLOOR_EXPR:
7502 case FIX_CEIL_EXPR:
7503 abort (); /* Not used for C. */
7504
7505 case FIX_TRUNC_EXPR:
7506 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7507 if (target == 0)
7508 target = gen_reg_rtx (mode);
7509 expand_fix (target, op0, unsignedp);
7510 return target;
7511
7512 case FLOAT_EXPR:
7513 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7514 if (target == 0)
7515 target = gen_reg_rtx (mode);
7516 /* expand_float can't figure out what to do if FROM has VOIDmode.
7517 So give it the correct mode. With -O, cse will optimize this. */
7518 if (GET_MODE (op0) == VOIDmode)
7519 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7520 op0);
7521 expand_float (target, op0,
7522 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7523 return target;
7524
7525 case NEGATE_EXPR:
7526 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7527 temp = expand_unop (mode, neg_optab, op0, target, 0);
7528 if (temp == 0)
7529 abort ();
7530 return temp;
7531
7532 case ABS_EXPR:
7533 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7534
7535 /* Handle complex values specially. */
7536 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7537 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7538 return expand_complex_abs (mode, op0, target, unsignedp);
7539
7540 /* Unsigned abs is simply the operand. Testing here means we don't
7541 risk generating incorrect code below. */
7542 if (TREE_UNSIGNED (type))
7543 return op0;
7544
7545 return expand_abs (mode, op0, target,
7546 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7547
7548 case MAX_EXPR:
7549 case MIN_EXPR:
7550 target = original_target;
7551 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7552 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7553 || GET_MODE (target) != mode
7554 || (GET_CODE (target) == REG
7555 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7556 target = gen_reg_rtx (mode);
7557 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7558 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7559
7560 /* First try to do it with a special MIN or MAX instruction.
7561 If that does not win, use a conditional jump to select the proper
7562 value. */
7563 this_optab = (TREE_UNSIGNED (type)
7564 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7565 : (code == MIN_EXPR ? smin_optab : smax_optab));
7566
7567 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7568 OPTAB_WIDEN);
7569 if (temp != 0)
7570 return temp;
7571
7572 /* At this point, a MEM target is no longer useful; we will get better
7573 code without it. */
7574
7575 if (GET_CODE (target) == MEM)
7576 target = gen_reg_rtx (mode);
7577
7578 if (target != op0)
7579 emit_move_insn (target, op0);
7580
7581 op0 = gen_label_rtx ();
7582
7583 /* If this mode is an integer too wide to compare properly,
7584 compare word by word. Rely on cse to optimize constant cases. */
7585 if (GET_MODE_CLASS (mode) == MODE_INT
7586 && ! can_compare_p (GE, mode, ccp_jump))
7587 {
7588 if (code == MAX_EXPR)
7589 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7590 target, op1, NULL_RTX, op0);
7591 else
7592 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7593 op1, target, NULL_RTX, op0);
7594 }
7595 else
7596 {
7597 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7598 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7599 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7600 op0);
7601 }
7602 emit_move_insn (target, op1);
7603 emit_label (op0);
7604 return target;
7605
7606 case BIT_NOT_EXPR:
7607 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7608 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7609 if (temp == 0)
7610 abort ();
7611 return temp;
7612
7613 case FFS_EXPR:
7614 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7615 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7616 if (temp == 0)
7617 abort ();
7618 return temp;
7619
7620 /* ??? Can optimize bitwise operations with one arg constant.
7621 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7622 and (a bitwise1 b) bitwise2 b (etc)
7623 but that is probably not worth while. */
7624
7625 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7626 boolean values when we want in all cases to compute both of them. In
7627 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7628 as actual zero-or-1 values and then bitwise anding. In cases where
7629 there cannot be any side effects, better code would be made by
7630 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7631 how to recognize those cases. */
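/* For illustration: a TRUTH_AND_EXPR of (a > 0) and (b > 0) evaluates
   both comparisons to 0 or 1 and ands the results, whereas
   TRUTH_ANDIF_EXPR would branch around the second comparison when the
   first one is false.  */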
7632
7633 case TRUTH_AND_EXPR:
7634 case BIT_AND_EXPR:
7635 this_optab = and_optab;
7636 goto binop;
7637
7638 case TRUTH_OR_EXPR:
7639 case BIT_IOR_EXPR:
7640 this_optab = ior_optab;
7641 goto binop;
7642
7643 case TRUTH_XOR_EXPR:
7644 case BIT_XOR_EXPR:
7645 this_optab = xor_optab;
7646 goto binop;
7647
7648 case LSHIFT_EXPR:
7649 case RSHIFT_EXPR:
7650 case LROTATE_EXPR:
7651 case RROTATE_EXPR:
7652 preexpand_calls (exp);
7653 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7654 subtarget = 0;
7655 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7656 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7657 unsignedp);
7658
7659 /* Could determine the answer when only additive constants differ. Also,
7660 the addition of one can be handled by changing the condition. */
7661 case LT_EXPR:
7662 case LE_EXPR:
7663 case GT_EXPR:
7664 case GE_EXPR:
7665 case EQ_EXPR:
7666 case NE_EXPR:
7667 case UNORDERED_EXPR:
7668 case ORDERED_EXPR:
7669 case UNLT_EXPR:
7670 case UNLE_EXPR:
7671 case UNGT_EXPR:
7672 case UNGE_EXPR:
7673 case UNEQ_EXPR:
7674 preexpand_calls (exp);
7675 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7676 if (temp != 0)
7677 return temp;
7678
7679 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7680 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7681 && original_target
7682 && GET_CODE (original_target) == REG
7683 && (GET_MODE (original_target)
7684 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7685 {
7686 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7687 VOIDmode, 0);
7688
7689 if (temp != original_target)
7690 temp = copy_to_reg (temp);
7691
7692 op1 = gen_label_rtx ();
7693 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7694 GET_MODE (temp), unsignedp, 0, op1);
7695 emit_move_insn (temp, const1_rtx);
7696 emit_label (op1);
7697 return temp;
7698 }
7699
7700 /* If no set-flag instruction, must generate a conditional
7701 store into a temporary variable. Drop through
7702 and handle this like && and ||. */
7703
7704 case TRUTH_ANDIF_EXPR:
7705 case TRUTH_ORIF_EXPR:
7706 if (! ignore
7707 && (target == 0 || ! safe_from_p (target, exp, 1)
7708 /* Make sure we don't have a hard reg (such as function's return
7709 value) live across basic blocks, if not optimizing. */
7710 || (!optimize && GET_CODE (target) == REG
7711 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7712 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7713
7714 if (target)
7715 emit_clr_insn (target);
7716
7717 op1 = gen_label_rtx ();
7718 jumpifnot (exp, op1);
7719
7720 if (target)
7721 emit_0_to_1_insn (target);
7722
7723 emit_label (op1);
7724 return ignore ? const0_rtx : target;
7725
7726 case TRUTH_NOT_EXPR:
7727 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7728 /* The parser is careful to generate TRUTH_NOT_EXPR
7729 only with operands that are always zero or one. */
7730 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7731 target, 1, OPTAB_LIB_WIDEN);
7732 if (temp == 0)
7733 abort ();
7734 return temp;
7735
7736 case COMPOUND_EXPR:
7737 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7738 emit_queue ();
7739 return expand_expr (TREE_OPERAND (exp, 1),
7740 (ignore ? const0_rtx : target),
7741 VOIDmode, 0);
7742
7743 case COND_EXPR:
7744 /* If we would have a "singleton" (see below) were it not for a
7745 conversion in each arm, bring that conversion back out. */
7746 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7747 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7748 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7749 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7750 {
7751 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7752 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7753
7754 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7755 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7756 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7757 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7758 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7759 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7760 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7761 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7762 return expand_expr (build1 (NOP_EXPR, type,
7763 build (COND_EXPR, TREE_TYPE (true),
7764 TREE_OPERAND (exp, 0),
7765 true, false)),
7766 target, tmode, modifier);
7767 }
7768
7769 {
7770 /* Note that COND_EXPRs whose type is a structure or union
7771 are required to be constructed to contain assignments of
7772 a temporary variable, so that we can evaluate them here
7773 for side effect only. If type is void, we must do likewise. */
7774
7775 /* If an arm of the branch requires a cleanup,
7776 only that cleanup is performed. */
7777
7778 tree singleton = 0;
7779 tree binary_op = 0, unary_op = 0;
7780
7781 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7782 convert it to our mode, if necessary. */
7783 if (integer_onep (TREE_OPERAND (exp, 1))
7784 && integer_zerop (TREE_OPERAND (exp, 2))
7785 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7786 {
7787 if (ignore)
7788 {
7789 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7790 ro_modifier);
7791 return const0_rtx;
7792 }
7793
7794 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7795 if (GET_MODE (op0) == mode)
7796 return op0;
7797
7798 if (target == 0)
7799 target = gen_reg_rtx (mode);
7800 convert_move (target, op0, unsignedp);
7801 return target;
7802 }
7803
7804 /* Check for X ? A + B : A. If we have this, we can copy A to the
7805 output and conditionally add B. Similarly for unary operations.
7806 Don't do this if X has side-effects because those side effects
7807 might affect A or B and the "?" operation is a sequence point in
7808 ANSI. (operand_equal_p tests for side effects.) */
7809
7810 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7811 && operand_equal_p (TREE_OPERAND (exp, 2),
7812 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7813 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7814 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7815 && operand_equal_p (TREE_OPERAND (exp, 1),
7816 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7817 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7818 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7819 && operand_equal_p (TREE_OPERAND (exp, 2),
7820 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7821 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7822 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7823 && operand_equal_p (TREE_OPERAND (exp, 1),
7824 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7825 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7826
7827 /* If we are not to produce a result, we have no target. Otherwise,
7828 if a target was specified use it; it will not be used as an
7829 intermediate target unless it is safe. If no target, use a
7830 temporary. */
7831
7832 if (ignore)
7833 temp = 0;
7834 else if (original_target
7835 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7836 || (singleton && GET_CODE (original_target) == REG
7837 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7838 && original_target == var_rtx (singleton)))
7839 && GET_MODE (original_target) == mode
7840 #ifdef HAVE_conditional_move
7841 && (! can_conditionally_move_p (mode)
7842 || GET_CODE (original_target) == REG
7843 || TREE_ADDRESSABLE (type))
7844 #endif
7845 && ! (GET_CODE (original_target) == MEM
7846 && MEM_VOLATILE_P (original_target)))
7847 temp = original_target;
7848 else if (TREE_ADDRESSABLE (type))
7849 abort ();
7850 else
7851 temp = assign_temp (type, 0, 0, 1);
7852
7853 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7854 do the test of X as a store-flag operation, do this as
7855 A + ((X != 0) << log C). Similarly for other simple binary
7856 operators. Only do for C == 1 if BRANCH_COST is low. */
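/* For illustration (hypothetical source): X ? A + 4 : A becomes
   A + ((X != 0) << 2): do_store_flag yields the 0-or-1 value of X,
   expand_shift scales it by log2 (4), and the addition is then done
   unconditionally, avoiding a branch.  */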
7857 if (temp && singleton && binary_op
7858 && (TREE_CODE (binary_op) == PLUS_EXPR
7859 || TREE_CODE (binary_op) == MINUS_EXPR
7860 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7861 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7862 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7863 : integer_onep (TREE_OPERAND (binary_op, 1)))
7864 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7865 {
7866 rtx result;
7867 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7868 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7869 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7870 : xor_optab);
7871
7872 /* If we had X ? A : A + 1, do this as A + (X == 0).
7873
7874 We have to invert the truth value here and then put it
7875 back later if do_store_flag fails. We cannot simply copy
7876 TREE_OPERAND (exp, 0) to another variable and modify that
7877 because invert_truthvalue can modify the tree pointed to
7878 by its argument. */
7879 if (singleton == TREE_OPERAND (exp, 1))
7880 TREE_OPERAND (exp, 0)
7881 = invert_truthvalue (TREE_OPERAND (exp, 0));
7882
7883 result = do_store_flag (TREE_OPERAND (exp, 0),
7884 (safe_from_p (temp, singleton, 1)
7885 ? temp : NULL_RTX),
7886 mode, BRANCH_COST <= 1);
7887
7888 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7889 result = expand_shift (LSHIFT_EXPR, mode, result,
7890 build_int_2 (tree_log2
7891 (TREE_OPERAND
7892 (binary_op, 1)),
7893 0),
7894 (safe_from_p (temp, singleton, 1)
7895 ? temp : NULL_RTX), 0);
7896
7897 if (result)
7898 {
7899 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7900 return expand_binop (mode, boptab, op1, result, temp,
7901 unsignedp, OPTAB_LIB_WIDEN);
7902 }
7903 else if (singleton == TREE_OPERAND (exp, 1))
7904 TREE_OPERAND (exp, 0)
7905 = invert_truthvalue (TREE_OPERAND (exp, 0));
7906 }
7907
7908 do_pending_stack_adjust ();
7909 NO_DEFER_POP;
7910 op0 = gen_label_rtx ();
7911
7912 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7913 {
7914 if (temp != 0)
7915 {
7916 /* If the target conflicts with the other operand of the
7917 binary op, we can't use it. Also, we can't use the target
7918 if it is a hard register, because evaluating the condition
7919 might clobber it. */
7920 if ((binary_op
7921 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7922 || (GET_CODE (temp) == REG
7923 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7924 temp = gen_reg_rtx (mode);
7925 store_expr (singleton, temp, 0);
7926 }
7927 else
7928 expand_expr (singleton,
7929 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7930 if (singleton == TREE_OPERAND (exp, 1))
7931 jumpif (TREE_OPERAND (exp, 0), op0);
7932 else
7933 jumpifnot (TREE_OPERAND (exp, 0), op0);
7934
7935 start_cleanup_deferral ();
7936 if (binary_op && temp == 0)
7937 /* Just touch the other operand. */
7938 expand_expr (TREE_OPERAND (binary_op, 1),
7939 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7940 else if (binary_op)
7941 store_expr (build (TREE_CODE (binary_op), type,
7942 make_tree (type, temp),
7943 TREE_OPERAND (binary_op, 1)),
7944 temp, 0);
7945 else
7946 store_expr (build1 (TREE_CODE (unary_op), type,
7947 make_tree (type, temp)),
7948 temp, 0);
7949 op1 = op0;
7950 }
7951 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7952 comparison operator. If we have one of these cases, set the
7953 output to A, branch on A (cse will merge these two references),
7954 then set the output to FOO. */
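/* For illustration: X > 0 ? X : Y (assuming the condition has no side
   effects) is expanded by storing X into the output, jumping over the
   store of Y when X > 0, and otherwise storing Y; cse can then merge
   the two references to X.  */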
7955 else if (temp
7956 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7957 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7958 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7959 TREE_OPERAND (exp, 1), 0)
7960 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7961 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7962 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7963 {
7964 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7965 temp = gen_reg_rtx (mode);
7966 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7967 jumpif (TREE_OPERAND (exp, 0), op0);
7968
7969 start_cleanup_deferral ();
7970 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7971 op1 = op0;
7972 }
7973 else if (temp
7974 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7975 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7976 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7977 TREE_OPERAND (exp, 2), 0)
7978 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7979 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7980 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7981 {
7982 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7983 temp = gen_reg_rtx (mode);
7984 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7985 jumpifnot (TREE_OPERAND (exp, 0), op0);
7986
7987 start_cleanup_deferral ();
7988 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7989 op1 = op0;
7990 }
7991 else
7992 {
7993 op1 = gen_label_rtx ();
7994 jumpifnot (TREE_OPERAND (exp, 0), op0);
7995
7996 start_cleanup_deferral ();
7997
7998 /* One branch of the cond can be void, if it never returns. For
7999 example A ? throw : E */
8000 if (temp != 0
8001 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8002 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8003 else
8004 expand_expr (TREE_OPERAND (exp, 1),
8005 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8006 end_cleanup_deferral ();
8007 emit_queue ();
8008 emit_jump_insn (gen_jump (op1));
8009 emit_barrier ();
8010 emit_label (op0);
8011 start_cleanup_deferral ();
8012 if (temp != 0
8013 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8014 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8015 else
8016 expand_expr (TREE_OPERAND (exp, 2),
8017 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8018 }
8019
8020 end_cleanup_deferral ();
8021
8022 emit_queue ();
8023 emit_label (op1);
8024 OK_DEFER_POP;
8025
8026 return temp;
8027 }
8028
8029 case TARGET_EXPR:
8030 {
8031 /* Something needs to be initialized, but we didn't know
8032 where that thing was when building the tree. For example,
8033 it could be the return value of a function, or a parameter
8034 to a function that is passed on the stack, or a temporary
8035 variable which must be passed by reference.
8036
8037 We guarantee that the expression will either be constructed
8038 or copied into our original target. */
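/* For illustration: in the C++ front end a temporary of class type,
   say one bound to a reference parameter, is represented as a
   TARGET_EXPR; the code below either reuses the slot's DECL_RTL,
   assigns a stack temporary and records a cleanup for it, or
   initializes the caller-supplied target directly.  */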
8039
8040 tree slot = TREE_OPERAND (exp, 0);
8041 tree cleanups = NULL_TREE;
8042 tree exp1;
8043
8044 if (TREE_CODE (slot) != VAR_DECL)
8045 abort ();
8046
8047 if (! ignore)
8048 target = original_target;
8049
8050 /* Set this here so that if we get a target that refers to a
8051 register variable that's already been used, put_reg_into_stack
8052 knows that it should fix up those uses. */
8053 TREE_USED (slot) = 1;
8054
8055 if (target == 0)
8056 {
8057 if (DECL_RTL (slot) != 0)
8058 {
8059 target = DECL_RTL (slot);
8060 /* If we have already expanded the slot, don't do
8061 it again. (mrs) */
8062 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8063 return target;
8064 }
8065 else
8066 {
8067 target = assign_temp (type, 2, 0, 1);
8068 /* All temp slots at this level must not conflict. */
8069 preserve_temp_slots (target);
8070 DECL_RTL (slot) = target;
8071 if (TREE_ADDRESSABLE (slot))
8072 {
8073 TREE_ADDRESSABLE (slot) = 0;
8074 mark_addressable (slot);
8075 }
8076
8077 /* Since SLOT is not known to the called function
8078 to belong to its stack frame, we must build an explicit
8079 cleanup. This case occurs when we must build up a reference
8080 to pass the reference as an argument. In this case,
8081 it is very likely that such a reference need not be
8082 built here. */
8083
8084 if (TREE_OPERAND (exp, 2) == 0)
8085 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8086 cleanups = TREE_OPERAND (exp, 2);
8087 }
8088 }
8089 else
8090 {
8091 /* This case does occur when expanding a parameter which
8092 needs to be constructed on the stack. The target
8093 is the actual stack address that we want to initialize.
8094 The function we call will perform the cleanup in this case. */
8095
8096 /* If we have already assigned it space, use that space,
8097 not the target that we were passed in, as our target
8098 parameter is only a hint. */
8099 if (DECL_RTL (slot) != 0)
8100 {
8101 target = DECL_RTL (slot);
8102 /* If we have already expanded the slot, don't do
8103 it again. (mrs) */
8104 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8105 return target;
8106 }
8107 else
8108 {
8109 DECL_RTL (slot) = target;
8110 /* If we must have an addressable slot, then make sure that
8111 the RTL that we just stored in slot is OK. */
8112 if (TREE_ADDRESSABLE (slot))
8113 {
8114 TREE_ADDRESSABLE (slot) = 0;
8115 mark_addressable (slot);
8116 }
8117 }
8118 }
8119
8120 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8121 /* Mark it as expanded. */
8122 TREE_OPERAND (exp, 1) = NULL_TREE;
8123
8124 store_expr (exp1, target, 0);
8125
8126 expand_decl_cleanup (NULL_TREE, cleanups);
8127
8128 return target;
8129 }
8130
8131 case INIT_EXPR:
8132 {
8133 tree lhs = TREE_OPERAND (exp, 0);
8134 tree rhs = TREE_OPERAND (exp, 1);
8135 tree noncopied_parts = 0;
8136 tree lhs_type = TREE_TYPE (lhs);
8137
8138 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8139 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8140 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8141 TYPE_NONCOPIED_PARTS (lhs_type));
8142 while (noncopied_parts != 0)
8143 {
8144 expand_assignment (TREE_VALUE (noncopied_parts),
8145 TREE_PURPOSE (noncopied_parts), 0, 0);
8146 noncopied_parts = TREE_CHAIN (noncopied_parts);
8147 }
8148 return temp;
8149 }
8150
8151 case MODIFY_EXPR:
8152 {
8153 /* If lhs is complex, expand calls in rhs before computing it.
8154 That's so we don't compute a pointer and save it over a call.
8155 If lhs is simple, compute it first so we can give it as a
8156 target if the rhs is just a call. This avoids an extra temp and copy
8157 and prevents a partial subsumption which makes bad code.
8158 Actually we could treat component_ref's of vars like vars. */
8159
8160 tree lhs = TREE_OPERAND (exp, 0);
8161 tree rhs = TREE_OPERAND (exp, 1);
8162 tree noncopied_parts = 0;
8163 tree lhs_type = TREE_TYPE (lhs);
8164
8165 temp = 0;
8166
8167 if (TREE_CODE (lhs) != VAR_DECL
8168 && TREE_CODE (lhs) != RESULT_DECL
8169 && TREE_CODE (lhs) != PARM_DECL
8170 && ! (TREE_CODE (lhs) == INDIRECT_REF
8171 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8172 preexpand_calls (exp);
8173
8174 /* Check for |= or &= of a bitfield of size one into another bitfield
8175 of size 1. In this case, (unless we need the result of the
8176 assignment) we can do this more efficiently with a
8177 test followed by an assignment, if necessary.
8178
8179 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8180 things change so we do, this code should be enhanced to
8181 support it. */
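/* For illustration (hypothetical fields): given
       struct s { unsigned a : 1, b : 1; } x, y;
   the statement  x.a |= y.b;  (result unused) is expanded as
   if (y.b) x.a = 1;  i.e. a jump on the source bit and a store of a
   constant, instead of a read-modify-write of the destination bit.  */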
8182 if (ignore
8183 && TREE_CODE (lhs) == COMPONENT_REF
8184 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8185 || TREE_CODE (rhs) == BIT_AND_EXPR)
8186 && TREE_OPERAND (rhs, 0) == lhs
8187 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8188 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8189 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8190 {
8191 rtx label = gen_label_rtx ();
8192
8193 do_jump (TREE_OPERAND (rhs, 1),
8194 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8195 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8196 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8197 (TREE_CODE (rhs) == BIT_IOR_EXPR
8198 ? integer_one_node
8199 : integer_zero_node)),
8200 0, 0);
8201 do_pending_stack_adjust ();
8202 emit_label (label);
8203 return const0_rtx;
8204 }
8205
8206 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8207 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8208 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8209 TYPE_NONCOPIED_PARTS (lhs_type));
8210
8211 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8212 while (noncopied_parts != 0)
8213 {
8214 expand_assignment (TREE_PURPOSE (noncopied_parts),
8215 TREE_VALUE (noncopied_parts), 0, 0);
8216 noncopied_parts = TREE_CHAIN (noncopied_parts);
8217 }
8218 return temp;
8219 }
8220
8221 case RETURN_EXPR:
8222 if (!TREE_OPERAND (exp, 0))
8223 expand_null_return ();
8224 else
8225 expand_return (TREE_OPERAND (exp, 0));
8226 return const0_rtx;
8227
8228 case PREINCREMENT_EXPR:
8229 case PREDECREMENT_EXPR:
8230 return expand_increment (exp, 0, ignore);
8231
8232 case POSTINCREMENT_EXPR:
8233 case POSTDECREMENT_EXPR:
8234 /* Faster to treat as pre-increment if result is not used. */
8235 return expand_increment (exp, ! ignore, ignore);
8236
8237 case ADDR_EXPR:
8238 /* If nonzero, TEMP will be set to the address of something that might
8239 be a MEM corresponding to a stack slot. */
8240 temp = 0;
8241
8242 /* Are we taking the address of a nested function? */
8243 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8244 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8245 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8246 && ! TREE_STATIC (exp))
8247 {
8248 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8249 op0 = force_operand (op0, target);
8250 }
8251 /* If we are taking the address of something erroneous, just
8252 return a zero. */
8253 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8254 return const0_rtx;
8255 else
8256 {
8257 /* We make sure to pass const0_rtx down if we came in with
8258 ignore set, to avoid doing the cleanups twice for something. */
8259 op0 = expand_expr (TREE_OPERAND (exp, 0),
8260 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8261 (modifier == EXPAND_INITIALIZER
8262 ? modifier : EXPAND_CONST_ADDRESS));
8263
8264 /* If we are going to ignore the result, OP0 will have been set
8265 to const0_rtx, so just return it. Don't get confused and
8266 think we are taking the address of the constant. */
8267 if (ignore)
8268 return op0;
8269
8270 op0 = protect_from_queue (op0, 0);
8271
8272 /* We would like the object in memory. If it is a constant, we can
8273 have it be statically allocated into memory. For a non-constant,
8274 we need to allocate some memory and store the value into it. */
8275
8276 if (CONSTANT_P (op0))
8277 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8278 op0);
8279 else if (GET_CODE (op0) == MEM)
8280 {
8281 mark_temp_addr_taken (op0);
8282 temp = XEXP (op0, 0);
8283 }
8284
8285 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8286 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8287 {
8288 /* If this object is in a register, it must not
8289 be BLKmode. */
8290 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8291 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8292
8293 mark_temp_addr_taken (memloc);
8294 emit_move_insn (memloc, op0);
8295 op0 = memloc;
8296 }
8297
8298 if (GET_CODE (op0) != MEM)
8299 abort ();
8300
8301 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8302 {
8303 temp = XEXP (op0, 0);
8304 #ifdef POINTERS_EXTEND_UNSIGNED
8305 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8306 && mode == ptr_mode)
8307 temp = convert_memory_address (ptr_mode, temp);
8308 #endif
8309 return temp;
8310 }
8311
8312 op0 = force_operand (XEXP (op0, 0), target);
8313 }
8314
8315 if (flag_force_addr && GET_CODE (op0) != REG)
8316 op0 = force_reg (Pmode, op0);
8317
8318 if (GET_CODE (op0) == REG
8319 && ! REG_USERVAR_P (op0))
8320 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8321
8322 /* If we might have had a temp slot, add an equivalent address
8323 for it. */
8324 if (temp != 0)
8325 update_temp_slot_address (temp, op0);
8326
8327 #ifdef POINTERS_EXTEND_UNSIGNED
8328 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8329 && mode == ptr_mode)
8330 op0 = convert_memory_address (ptr_mode, op0);
8331 #endif
8332
8333 return op0;
8334
8335 case ENTRY_VALUE_EXPR:
8336 abort ();
8337
8338 /* COMPLEX type for Extended Pascal & Fortran */
8339 case COMPLEX_EXPR:
8340 {
8341 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8342 rtx insns;
8343
8344 /* Get the rtx for the operands.  */
8345 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8346 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8347
8348 if (! target)
8349 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8350
8351 start_sequence ();
8352
8353 /* Move the real (op0) and imaginary (op1) parts to their location. */
8354 emit_move_insn (gen_realpart (mode, target), op0);
8355 emit_move_insn (gen_imagpart (mode, target), op1);
8356
8357 insns = get_insns ();
8358 end_sequence ();
8359
8360 /* Complex construction should appear as a single unit. */
8361 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8362 each with a separate pseudo as destination.
8363 It's not correct for flow to treat them as a unit. */
8364 if (GET_CODE (target) != CONCAT)
8365 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8366 else
8367 emit_insns (insns);
8368
8369 return target;
8370 }
8371
8372 case REALPART_EXPR:
8373 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8374 return gen_realpart (mode, op0);
8375
8376 case IMAGPART_EXPR:
8377 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8378 return gen_imagpart (mode, op0);
8379
8380 case CONJ_EXPR:
8381 {
8382 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8383 rtx imag_t;
8384 rtx insns;
8385
8386 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8387
8388 if (! target)
8389 target = gen_reg_rtx (mode);
8390
8391 start_sequence ();
8392
8393 /* Store the realpart and the negated imagpart to target. */
8394 emit_move_insn (gen_realpart (partmode, target),
8395 gen_realpart (partmode, op0));
8396
8397 imag_t = gen_imagpart (partmode, target);
8398 temp = expand_unop (partmode, neg_optab,
8399 gen_imagpart (partmode, op0), imag_t, 0);
8400 if (temp != imag_t)
8401 emit_move_insn (imag_t, temp);
8402
8403 insns = get_insns ();
8404 end_sequence ();
8405
8406 /* Conjugate should appear as a single unit.
8407 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8408 each with a separate pseudo as destination.
8409 It's not correct for flow to treat them as a unit. */
8410 if (GET_CODE (target) != CONCAT)
8411 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8412 else
8413 emit_insns (insns);
8414
8415 return target;
8416 }
8417
8418 case TRY_CATCH_EXPR:
8419 {
8420 tree handler = TREE_OPERAND (exp, 1);
8421
8422 expand_eh_region_start ();
8423
8424 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8425
8426 expand_eh_region_end (handler);
8427
8428 return op0;
8429 }
8430
8431 case TRY_FINALLY_EXPR:
8432 {
8433 tree try_block = TREE_OPERAND (exp, 0);
8434 tree finally_block = TREE_OPERAND (exp, 1);
8435 rtx finally_label = gen_label_rtx ();
8436 rtx done_label = gen_label_rtx ();
8437 rtx return_link = gen_reg_rtx (Pmode);
8438 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8439 (tree) finally_label, (tree) return_link);
8440 TREE_SIDE_EFFECTS (cleanup) = 1;
8441
8442 /* Start a new binding layer that will keep track of all cleanup
8443 actions to be performed. */
8444 expand_start_bindings (2);
8445
8446 target_temp_slot_level = temp_slot_level;
8447
8448 expand_decl_cleanup (NULL_TREE, cleanup);
8449 op0 = expand_expr (try_block, target, tmode, modifier);
8450
8451 preserve_temp_slots (op0);
8452 expand_end_bindings (NULL_TREE, 0, 0);
8453 emit_jump (done_label);
8454 emit_label (finally_label);
8455 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8456 emit_indirect_jump (return_link);
8457 emit_label (done_label);
8458 return op0;
8459 }
8460
8461 case GOTO_SUBROUTINE_EXPR:
8462 {
8463 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8464 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8465 rtx return_address = gen_label_rtx ();
8466 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8467 emit_jump (subr);
8468 emit_label (return_address);
8469 return const0_rtx;
8470 }
8471
8472 case POPDCC_EXPR:
8473 {
8474 rtx dcc = get_dynamic_cleanup_chain ();
8475 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8476 return const0_rtx;
8477 }
8478
8479 case POPDHC_EXPR:
8480 {
8481 rtx dhc = get_dynamic_handler_chain ();
8482 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8483 return const0_rtx;
8484 }
8485
8486 case VA_ARG_EXPR:
8487 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8488
8489 default:
8490 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8491 }
8492
8493 /* Here to do an ordinary binary operator, generating an instruction
8494 from the optab already placed in `this_optab'. */
8495 binop:
8496 preexpand_calls (exp);
8497 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8498 subtarget = 0;
8499 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8500 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8501 binop2:
8502 temp = expand_binop (mode, this_optab, op0, op1, target,
8503 unsignedp, OPTAB_LIB_WIDEN);
8504 if (temp == 0)
8505 abort ();
8506 return temp;
8507 }
8508 \f
8509 /* Similar to expand_expr, except that we don't specify a target, target
8510 mode, or modifier and we return the alignment of the inner type. This is
8511 used in cases where it is not necessary to align the result to the
8512 alignment of its type as long as we know the alignment of the result, for
8513 example for comparisons of BLKmode values. */
8514
8515 static rtx
8516 expand_expr_unaligned (exp, palign)
8517 register tree exp;
8518 unsigned int *palign;
8519 {
8520 register rtx op0;
8521 tree type = TREE_TYPE (exp);
8522 register enum machine_mode mode = TYPE_MODE (type);
8523
8524 /* Default the alignment we return to that of the type. */
8525 *palign = TYPE_ALIGN (type);
8526
8527 /* The only case in which we do anything special is when the resulting mode
8528 is BLKmode. */
8529 if (mode != BLKmode)
8530 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8531
8532 switch (TREE_CODE (exp))
8533 {
8534 case CONVERT_EXPR:
8535 case NOP_EXPR:
8536 case NON_LVALUE_EXPR:
8537 /* Conversions between BLKmode values don't change the underlying
8538 alignment or value. */
8539 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8540 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8541 break;
8542
8543 case ARRAY_REF:
8544 /* Much of the code for this case is copied directly from expand_expr.
8545 We need to duplicate it here because we will do something different
8546 in the fall-through case, so we need to handle the same exceptions
8547 it does. */
8548 {
8549 tree array = TREE_OPERAND (exp, 0);
8550 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8551 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8552 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8553 HOST_WIDE_INT i;
8554
8555 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8556 abort ();
8557
8558 /* Optimize the special-case of a zero lower bound.
8559
8560 We convert the low_bound to sizetype to avoid some problems
8561 with constant folding. (E.g. suppose the lower bound is 1,
8562 and its mode is QI. Without the conversion, (ARRAY
8563 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8564 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8565
8566 if (! integer_zerop (low_bound))
8567 index = size_diffop (index, convert (sizetype, low_bound));
8568
8569 /* If this is a constant index into a constant array,
8570 just get the value from the array. Handle both the cases when
8571 we have an explicit constructor and when our operand is a variable
8572 that was declared const. */
8573
8574 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8575 && 0 > compare_tree_int (index,
8576 list_length (CONSTRUCTOR_ELTS
8577 (TREE_OPERAND (exp, 0)))))
8578 {
8579 tree elem;
8580
8581 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8582 i = TREE_INT_CST_LOW (index);
8583 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8584 ;
8585
8586 if (elem)
8587 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8588 }
8589
8590 else if (optimize >= 1
8591 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8592 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8593 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8594 {
8595 if (TREE_CODE (index) == INTEGER_CST)
8596 {
8597 tree init = DECL_INITIAL (array);
8598
8599 if (TREE_CODE (init) == CONSTRUCTOR)
8600 {
8601 tree elem;
8602
8603 for (elem = CONSTRUCTOR_ELTS (init);
8604 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8605 elem = TREE_CHAIN (elem))
8606 ;
8607
8608 if (elem)
8609 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8610 palign);
8611 }
8612 }
8613 }
8614 }
8615
8616 /* ... fall through ... */
8617
8618 case COMPONENT_REF:
8619 case BIT_FIELD_REF:
8620 /* If the operand is a CONSTRUCTOR, we can just extract the
8621 appropriate field if it is present. Don't do this if we have
8622 already written the data since we want to refer to that copy
8623 and varasm.c assumes that's what we'll do. */
8624 if (TREE_CODE (exp) != ARRAY_REF
8625 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8626 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8627 {
8628 tree elt;
8629
8630 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8631 elt = TREE_CHAIN (elt))
8632 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8633 /* Note that unlike the case in expand_expr, we know this is
8634 BLKmode and hence not an integer. */
8635 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8636 }
8637
8638 {
8639 enum machine_mode mode1;
8640 HOST_WIDE_INT bitsize, bitpos;
8641 tree offset;
8642 int volatilep = 0;
8643 unsigned int alignment;
8644 int unsignedp;
8645 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8646 &mode1, &unsignedp, &volatilep,
8647 &alignment);
8648
8649 /* If we got back the original object, something is wrong. Perhaps
8650 we are evaluating an expression too early. In any event, don't
8651 infinitely recurse. */
8652 if (tem == exp)
8653 abort ();
8654
8655 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8656
8657 /* If this is a constant, put it into a register if it is a
8658 legitimate constant and OFFSET is 0; otherwise put it into memory. */
8659 if (CONSTANT_P (op0))
8660 {
8661 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8662
8663 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8664 && offset == 0)
8665 op0 = force_reg (inner_mode, op0);
8666 else
8667 op0 = validize_mem (force_const_mem (inner_mode, op0));
8668 }
8669
8670 if (offset != 0)
8671 {
8672 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8673
8674 /* If this object is in a register, put it into memory.
8675 This case can't occur in C, but can in Ada if we have
8676 unchecked conversion of an expression from a scalar type to
8677 an array or record type. */
8678 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8679 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8680 {
8681 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8682
8683 mark_temp_addr_taken (memloc);
8684 emit_move_insn (memloc, op0);
8685 op0 = memloc;
8686 }
8687
8688 if (GET_CODE (op0) != MEM)
8689 abort ();
8690
8691 if (GET_MODE (offset_rtx) != ptr_mode)
8692 {
8693 #ifdef POINTERS_EXTEND_UNSIGNED
8694 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8695 #else
8696 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8697 #endif
8698 }
8699
8700 op0 = change_address (op0, VOIDmode,
8701 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8702 force_reg (ptr_mode,
8703 offset_rtx)));
8704 }
8705
8706 /* Don't forget about volatility even if this is a bitfield. */
8707 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8708 {
8709 op0 = copy_rtx (op0);
8710 MEM_VOLATILE_P (op0) = 1;
8711 }
8712
8713 /* Check the access. */
8714 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8715 {
8716 rtx to;
8717 int size;
8718
8719 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8720 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8721
8722 /* Check the access rights of the pointer. */
8723 if (size > BITS_PER_UNIT)
8724 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8725 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8726 TYPE_MODE (sizetype),
8727 GEN_INT (MEMORY_USE_RO),
8728 TYPE_MODE (integer_type_node));
8729 }
8730
8731 /* In cases where an aligned union has an unaligned object
8732 as a field, we might be extracting a BLKmode value from
8733 an integer-mode (e.g., SImode) object. Handle this case
8734 by doing the extract into an object as wide as the field
8735 (which we know to be the width of a basic mode), then
8736 storing into memory, and changing the mode to BLKmode.
8737 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8738 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8739 if (mode1 == VOIDmode
8740 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8741 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8742 && (TYPE_ALIGN (type) > alignment
8743 || bitpos % TYPE_ALIGN (type) != 0)))
8744 {
8745 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8746
8747 if (ext_mode == BLKmode)
8748 {
8749 /* In this case, BITPOS must start at a byte boundary. */
8750 if (GET_CODE (op0) != MEM
8751 || bitpos % BITS_PER_UNIT != 0)
8752 abort ();
8753
8754 op0 = change_address (op0, VOIDmode,
8755 plus_constant (XEXP (op0, 0),
8756 bitpos / BITS_PER_UNIT));
8757 }
8758 else
8759 {
8760 rtx new = assign_stack_temp (ext_mode,
8761 bitsize / BITS_PER_UNIT, 0);
8762
8763 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8764 unsignedp, NULL_RTX, ext_mode,
8765 ext_mode, alignment,
8766 int_size_in_bytes (TREE_TYPE (tem)));
8767
8768 /* If the result is a record type and BITSIZE is narrower than
8769 the mode of OP0, an integral mode, and this is a big endian
8770 machine, we must put the field into the high-order bits. */
8771 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8772 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8773 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8774 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8775 size_int (GET_MODE_BITSIZE
8776 (GET_MODE (op0))
8777 - bitsize),
8778 op0, 1);
8779
8780
8781 emit_move_insn (new, op0);
8782 op0 = copy_rtx (new);
8783 PUT_MODE (op0, BLKmode);
8784 }
8785 }
8786 else
8787 /* Get a reference to just this component. */
8788 op0 = change_address (op0, mode1,
8789 plus_constant (XEXP (op0, 0),
8790 (bitpos / BITS_PER_UNIT)));
8791
8792 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8793
8794 /* Adjust the alignment in case the bit position is not
8795 a multiple of the alignment of the inner object. */
8796 while (bitpos % alignment != 0)
8797 alignment >>= 1;
8798
8799 if (GET_CODE (XEXP (op0, 0)) == REG)
8800 mark_reg_pointer (XEXP (op0, 0), alignment);
8801
8802 MEM_IN_STRUCT_P (op0) = 1;
8803 MEM_VOLATILE_P (op0) |= volatilep;
8804
8805 *palign = alignment;
8806 return op0;
8807 }
8808
8809 default:
8810 break;
8811
8812 }
8813
8814 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8815 }
8816 \f
8817 /* Return the tree node if ARG corresponds to a string constant, or zero
8818 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8819 in bytes within the string that ARG is accessing. The type of the
8820 offset will be `sizetype'. */
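/* For instance, if ARG is a PLUS_EXPR whose first operand is the ADDR_EXPR
   of a STRING_CST, this returns that STRING_CST and sets *PTR_OFFSET to the
   second operand converted to `sizetype'.  */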
8821
8822 tree
8823 string_constant (arg, ptr_offset)
8824 tree arg;
8825 tree *ptr_offset;
8826 {
8827 STRIP_NOPS (arg);
8828
8829 if (TREE_CODE (arg) == ADDR_EXPR
8830 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8831 {
8832 *ptr_offset = size_zero_node;
8833 return TREE_OPERAND (arg, 0);
8834 }
8835 else if (TREE_CODE (arg) == PLUS_EXPR)
8836 {
8837 tree arg0 = TREE_OPERAND (arg, 0);
8838 tree arg1 = TREE_OPERAND (arg, 1);
8839
8840 STRIP_NOPS (arg0);
8841 STRIP_NOPS (arg1);
8842
8843 if (TREE_CODE (arg0) == ADDR_EXPR
8844 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8845 {
8846 *ptr_offset = convert (sizetype, arg1);
8847 return TREE_OPERAND (arg0, 0);
8848 }
8849 else if (TREE_CODE (arg1) == ADDR_EXPR
8850 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8851 {
8852 *ptr_offset = convert (sizetype, arg0);
8853 return TREE_OPERAND (arg1, 0);
8854 }
8855 }
8856
8857 return 0;
8858 }
8859 \f
8860 /* Expand code for a post- or pre- increment or decrement
8861 and return the RTX for the result.
8862 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
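/* IGNORE is nonzero if the caller does not need the value of the expression,
   only its side effect.  */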
8863
8864 static rtx
8865 expand_increment (exp, post, ignore)
8866 register tree exp;
8867 int post, ignore;
8868 {
8869 register rtx op0, op1;
8870 register rtx temp, value;
8871 register tree incremented = TREE_OPERAND (exp, 0);
8872 optab this_optab = add_optab;
8873 int icode;
8874 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8875 int op0_is_copy = 0;
8876 int single_insn = 0;
8877 /* 1 means we can't store into OP0 directly,
8878 because it is a subreg narrower than a word,
8879 and we don't dare clobber the rest of the word. */
8880 int bad_subreg = 0;
8881
8882 /* Stabilize any component ref that might need to be
8883 evaluated more than once below. */
8884 if (!post
8885 || TREE_CODE (incremented) == BIT_FIELD_REF
8886 || (TREE_CODE (incremented) == COMPONENT_REF
8887 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8888 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8889 incremented = stabilize_reference (incremented);
8890 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8891 ones into save exprs so that they don't accidentally get evaluated
8892 more than once by the code below. */
8893 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8894 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8895 incremented = save_expr (incremented);
8896
8897 /* Compute the operands as RTX.
8898 Note whether OP0 is the actual lvalue or a copy of it:
8899 I believe it is a copy iff it is a register or subreg
8900 and insns were generated in computing it. */
8901
8902 temp = get_last_insn ();
8903 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8904
8905 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8906 in place but instead must do sign- or zero-extension during assignment,
8907 so we copy it into a new register and let the code below use it as
8908 a copy.
8909
8910 Note that we can safely modify this SUBREG since it is known not to be
8911 shared (it was made by the expand_expr call above). */
8912
8913 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8914 {
8915 if (post)
8916 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8917 else
8918 bad_subreg = 1;
8919 }
8920 else if (GET_CODE (op0) == SUBREG
8921 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8922 {
8923 /* We cannot increment this SUBREG in place. If we are
8924 post-incrementing, get a copy of the old value. Otherwise,
8925 just mark that we cannot increment in place. */
8926 if (post)
8927 op0 = copy_to_reg (op0);
8928 else
8929 bad_subreg = 1;
8930 }
8931
8932 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8933 && temp != get_last_insn ());
8934 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8935 EXPAND_MEMORY_USE_BAD);
8936
8937 /* Decide whether incrementing or decrementing. */
8938 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8939 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8940 this_optab = sub_optab;
8941
8942 /* Convert decrement by a constant into a negative increment. */
8943 if (this_optab == sub_optab
8944 && GET_CODE (op1) == CONST_INT)
8945 {
8946 op1 = GEN_INT (- INTVAL (op1));
8947 this_optab = add_optab;
8948 }
8949
8950 /* For a preincrement, see if we can do this with a single instruction. */
8951 if (!post)
8952 {
8953 icode = (int) this_optab->handlers[(int) mode].insn_code;
8954 if (icode != (int) CODE_FOR_nothing
8955 /* Make sure that OP0 is valid for operands 0 and 1
8956 of the insn we want to queue. */
8957 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8958 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8959 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8960 single_insn = 1;
8961 }
8962
8963 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8964 then we cannot just increment OP0. We must therefore contrive to
8965 increment the original value. Then, for postincrement, we can return
8966 OP0 since it is a copy of the old value. For preincrement, expand here
8967 unless we can do it with a single insn.
8968
8969 Likewise if storing directly into OP0 would clobber high bits
8970 we need to preserve (bad_subreg). */
8971 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8972 {
8973 /* This is the easiest way to increment the value wherever it is.
8974 Problems with multiple evaluation of INCREMENTED are prevented
8975 because either (1) it is a component_ref or preincrement,
8976 in which case it was stabilized above, or (2) it is an array_ref
8977 with constant index in an array in a register, which is
8978 safe to reevaluate. */
8979 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8980 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8981 ? MINUS_EXPR : PLUS_EXPR),
8982 TREE_TYPE (exp),
8983 incremented,
8984 TREE_OPERAND (exp, 1));
8985
8986 while (TREE_CODE (incremented) == NOP_EXPR
8987 || TREE_CODE (incremented) == CONVERT_EXPR)
8988 {
8989 newexp = convert (TREE_TYPE (incremented), newexp);
8990 incremented = TREE_OPERAND (incremented, 0);
8991 }
8992
8993 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8994 return post ? op0 : temp;
8995 }
8996
8997 if (post)
8998 {
8999 /* We have a true reference to the value in OP0.
9000 If there is an insn to add or subtract in this mode, queue it.
9001 Queueing the increment insn avoids the register shuffling
9002 that often results if we must increment now and first save
9003 the old value for subsequent use. */
9004
9005 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9006 op0 = stabilize (op0);
9007 #endif
9008
9009 icode = (int) this_optab->handlers[(int) mode].insn_code;
9010 if (icode != (int) CODE_FOR_nothing
9011 /* Make sure that OP0 is valid for operands 0 and 1
9012 of the insn we want to queue. */
9013 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9014 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9015 {
9016 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9017 op1 = force_reg (mode, op1);
9018
9019 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9020 }
9021 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9022 {
9023 rtx addr = (general_operand (XEXP (op0, 0), mode)
9024 ? force_reg (Pmode, XEXP (op0, 0))
9025 : copy_to_reg (XEXP (op0, 0)));
9026 rtx temp, result;
9027
9028 op0 = change_address (op0, VOIDmode, addr);
9029 temp = force_reg (GET_MODE (op0), op0);
9030 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9031 op1 = force_reg (mode, op1);
9032
9033 /* The increment queue is LIFO, thus we have to `queue'
9034 the instructions in reverse order. */
9035 enqueue_insn (op0, gen_move_insn (op0, temp));
9036 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9037 return result;
9038 }
9039 }
9040
9041 /* Preincrement, or we can't increment with one simple insn. */
9042 if (post)
9043 /* Save a copy of the value before inc or dec, to return it later. */
9044 temp = value = copy_to_reg (op0);
9045 else
9046 /* Arrange to return the incremented value. */
9047 /* Copy the rtx because expand_binop will protect from the queue,
9048 and the results of that would be invalid for us to return
9049 if our caller does emit_queue before using our result. */
9050 temp = copy_rtx (value = op0);
9051
9052 /* Increment however we can. */
9053 op1 = expand_binop (mode, this_optab, value, op1,
9054 current_function_check_memory_usage ? NULL_RTX : op0,
9055 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9056 /* Make sure the value is stored into OP0. */
9057 if (op1 != op0)
9058 emit_move_insn (op0, op1);
9059
9060 return temp;
9061 }
9062 \f
9063 /* Expand all function calls contained within EXP, innermost ones first.
9064 But don't look within expressions that have sequence points.
9065 For each CALL_EXPR, record the rtx for its value
9066 in the CALL_EXPR_RTL field. */
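/* For example, in an expression such as `x + f (y)', the call `f (y)' is
   expanded here first and its result rtx recorded, so that the later
   expansion of the sum picks up that rtx instead of expanding the call a
   second time.  */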
9067
9068 static void
9069 preexpand_calls (exp)
9070 tree exp;
9071 {
9072 register int nops, i;
9073 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9074
9075 if (! do_preexpand_calls)
9076 return;
9077
9078 /* Only expressions and references can contain calls. */
9079
9080 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9081 return;
9082
9083 switch (TREE_CODE (exp))
9084 {
9085 case CALL_EXPR:
9086 /* Do nothing if already expanded. */
9087 if (CALL_EXPR_RTL (exp) != 0
9088 /* Do nothing if the call returns a variable-sized object. */
9089 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9090 /* Do nothing to built-in functions. */
9091 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9092 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9093 == FUNCTION_DECL)
9094 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9095 return;
9096
9097 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9098 return;
9099
9100 case COMPOUND_EXPR:
9101 case COND_EXPR:
9102 case TRUTH_ANDIF_EXPR:
9103 case TRUTH_ORIF_EXPR:
9104 /* If we find one of these, then we can be sure
9105 the adjust will be done for it (since it makes jumps).
9106 Do it now, so that if this is inside an argument
9107 of a function, we don't get the stack adjustment
9108 after some other args have already been pushed. */
9109 do_pending_stack_adjust ();
9110 return;
9111
9112 case BLOCK:
9113 case RTL_EXPR:
9114 case WITH_CLEANUP_EXPR:
9115 case CLEANUP_POINT_EXPR:
9116 case TRY_CATCH_EXPR:
9117 return;
9118
9119 case SAVE_EXPR:
9120 if (SAVE_EXPR_RTL (exp) != 0)
9121 return;
9122
9123 default:
9124 break;
9125 }
9126
9127 nops = tree_code_length[(int) TREE_CODE (exp)];
9128 for (i = 0; i < nops; i++)
9129 if (TREE_OPERAND (exp, i) != 0)
9130 {
9131 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9132 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9133 It doesn't happen before the call is made. */
9134 ;
9135 else
9136 {
9137 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9138 if (type == 'e' || type == '<' || type == '1' || type == '2'
9139 || type == 'r')
9140 preexpand_calls (TREE_OPERAND (exp, i));
9141 }
9142 }
9143 }
9144 \f
9145 /* At the start of a function, record that we have no previously-pushed
9146 arguments waiting to be popped. */
9147
9148 void
9149 init_pending_stack_adjust ()
9150 {
9151 pending_stack_adjust = 0;
9152 }
9153
9154 /* When exiting from function, if safe, clear out any pending stack adjust
9155 so the adjustment won't get done.
9156
9157 Note, if the current function calls alloca, then it must have a
9158 frame pointer regardless of the value of flag_omit_frame_pointer. */
9159
9160 void
9161 clear_pending_stack_adjust ()
9162 {
9163 #ifdef EXIT_IGNORE_STACK
9164 if (optimize > 0
9165 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9166 && EXIT_IGNORE_STACK
9167 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9168 && ! flag_inline_functions)
9169 {
9170 stack_pointer_delta -= pending_stack_adjust;
9171 pending_stack_adjust = 0;
9172 }
9173 #endif
9174 }
9175
9176 /* Pop any previously-pushed arguments that have not been popped yet. */
9177
9178 void
9179 do_pending_stack_adjust ()
9180 {
9181 if (inhibit_defer_pop == 0)
9182 {
9183 if (pending_stack_adjust != 0)
9184 adjust_stack (GEN_INT (pending_stack_adjust));
9185 pending_stack_adjust = 0;
9186 }
9187 }
9188 \f
9189 /* Expand conditional expressions. */
9190
9191 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9192 LABEL is an rtx of code CODE_LABEL, in this function and all the
9193 functions here. */
9194
9195 void
9196 jumpifnot (exp, label)
9197 tree exp;
9198 rtx label;
9199 {
9200 do_jump (exp, label, NULL_RTX);
9201 }
9202
9203 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9204
9205 void
9206 jumpif (exp, label)
9207 tree exp;
9208 rtx label;
9209 {
9210 do_jump (exp, NULL_RTX, label);
9211 }
9212
9213 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9214 the result is zero, or IF_TRUE_LABEL if the result is one.
9215 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9216 meaning fall through in that case.
9217
9218 do_jump always does any pending stack adjust except when it does not
9219 actually perform a jump. An example where there is no jump
9220 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9221
9222 This function is responsible for optimizing cases such as
9223 &&, || and comparison operators in EXP. */
9224
9225 void
9226 do_jump (exp, if_false_label, if_true_label)
9227 tree exp;
9228 rtx if_false_label, if_true_label;
9229 {
9230 register enum tree_code code = TREE_CODE (exp);
9231 /* Some cases need to create a label to jump to
9232 in order to properly fall through.
9233 These cases set DROP_THROUGH_LABEL nonzero. */
9234 rtx drop_through_label = 0;
9235 rtx temp;
9236 int i;
9237 tree type;
9238 enum machine_mode mode;
9239
9240 #ifdef MAX_INTEGER_COMPUTATION_MODE
9241 check_max_integer_computation_mode (exp);
9242 #endif
9243
9244 emit_queue ();
9245
9246 switch (code)
9247 {
9248 case ERROR_MARK:
9249 break;
9250
9251 case INTEGER_CST:
9252 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9253 if (temp)
9254 emit_jump (temp);
9255 break;
9256
9257 #if 0
9258 /* This is not true with #pragma weak */
9259 case ADDR_EXPR:
9260 /* The address of something can never be zero. */
9261 if (if_true_label)
9262 emit_jump (if_true_label);
9263 break;
9264 #endif
9265
9266 case NOP_EXPR:
9267 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9268 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9269 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9270 goto normal;
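/* ... fall through ... */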
9271 case CONVERT_EXPR:
9272 /* If we are narrowing the operand, we have to do the compare in the
9273 narrower mode. */
9274 if ((TYPE_PRECISION (TREE_TYPE (exp))
9275 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9276 goto normal;
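/* ... fall through ... */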
9277 case NON_LVALUE_EXPR:
9278 case REFERENCE_EXPR:
9279 case ABS_EXPR:
9280 case NEGATE_EXPR:
9281 case LROTATE_EXPR:
9282 case RROTATE_EXPR:
9283 /* These cannot change zero->non-zero or vice versa. */
9284 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9285 break;
9286
9287 case WITH_RECORD_EXPR:
9288 /* Put the object on the placeholder list, recurse through our first
9289 operand, and pop the list. */
9290 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9291 placeholder_list);
9292 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9293 placeholder_list = TREE_CHAIN (placeholder_list);
9294 break;
9295
9296 #if 0
9297 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9298 a test and can be longer if the test is eliminated. */
9299 case PLUS_EXPR:
9300 /* Reduce to minus. */
9301 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9302 TREE_OPERAND (exp, 0),
9303 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9304 TREE_OPERAND (exp, 1))));
9305 /* Process as MINUS. */
9306 #endif
9307
9308 case MINUS_EXPR:
9309 /* Non-zero iff operands of minus differ. */
9310 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9311 TREE_OPERAND (exp, 0),
9312 TREE_OPERAND (exp, 1)),
9313 NE, NE, if_false_label, if_true_label);
9314 break;
9315
9316 case BIT_AND_EXPR:
9317 /* If we are AND'ing with a small constant, do this comparison in the
9318 smallest type that fits. If the machine doesn't have comparisons
9319 that small, it will be converted back to the wider comparison.
9320 This helps if we are testing the sign bit of a narrower object.
9321 combine can't do this for us because it can't know whether a
9322 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
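/* For instance, a test like `(x & 0x80) != 0' can be done as a QImode
   comparison, since only the low eight bits of the AND result can ever
   be nonzero.  */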
9323
9324 if (! SLOW_BYTE_ACCESS
9325 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9326 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9327 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9328 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9329 && (type = type_for_mode (mode, 1)) != 0
9330 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9331 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9332 != CODE_FOR_nothing))
9333 {
9334 do_jump (convert (type, exp), if_false_label, if_true_label);
9335 break;
9336 }
9337 goto normal;
9338
9339 case TRUTH_NOT_EXPR:
9340 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9341 break;
9342
9343 case TRUTH_ANDIF_EXPR:
9344 if (if_false_label == 0)
9345 if_false_label = drop_through_label = gen_label_rtx ();
9346 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9347 start_cleanup_deferral ();
9348 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9349 end_cleanup_deferral ();
9350 break;
9351
9352 case TRUTH_ORIF_EXPR:
9353 if (if_true_label == 0)
9354 if_true_label = drop_through_label = gen_label_rtx ();
9355 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9356 start_cleanup_deferral ();
9357 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9358 end_cleanup_deferral ();
9359 break;
9360
9361 case COMPOUND_EXPR:
9362 push_temp_slots ();
9363 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9364 preserve_temp_slots (NULL_RTX);
9365 free_temp_slots ();
9366 pop_temp_slots ();
9367 emit_queue ();
9368 do_pending_stack_adjust ();
9369 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9370 break;
9371
9372 case COMPONENT_REF:
9373 case BIT_FIELD_REF:
9374 case ARRAY_REF:
9375 {
9376 HOST_WIDE_INT bitsize, bitpos;
9377 int unsignedp;
9378 enum machine_mode mode;
9379 tree type;
9380 tree offset;
9381 int volatilep = 0;
9382 unsigned int alignment;
9383
9384 /* Get description of this reference. We don't actually care
9385 about the underlying object here. */
9386 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9387 &unsignedp, &volatilep, &alignment);
9388
9389 type = type_for_size (bitsize, unsignedp);
9390 if (! SLOW_BYTE_ACCESS
9391 && type != 0 && bitsize >= 0
9392 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9393 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9394 != CODE_FOR_nothing))
9395 {
9396 do_jump (convert (type, exp), if_false_label, if_true_label);
9397 break;
9398 }
9399 goto normal;
9400 }
9401
9402 case COND_EXPR:
9403 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9404 if (integer_onep (TREE_OPERAND (exp, 1))
9405 && integer_zerop (TREE_OPERAND (exp, 2)))
9406 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9407
9408 else if (integer_zerop (TREE_OPERAND (exp, 1))
9409 && integer_onep (TREE_OPERAND (exp, 2)))
9410 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9411
9412 else
9413 {
9414 register rtx label1 = gen_label_rtx ();
9415 drop_through_label = gen_label_rtx ();
9416
9417 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9418
9419 start_cleanup_deferral ();
9420 /* Now the THEN-expression. */
9421 do_jump (TREE_OPERAND (exp, 1),
9422 if_false_label ? if_false_label : drop_through_label,
9423 if_true_label ? if_true_label : drop_through_label);
9424 /* In case the do_jump just above never jumps. */
9425 do_pending_stack_adjust ();
9426 emit_label (label1);
9427
9428 /* Now the ELSE-expression. */
9429 do_jump (TREE_OPERAND (exp, 2),
9430 if_false_label ? if_false_label : drop_through_label,
9431 if_true_label ? if_true_label : drop_through_label);
9432 end_cleanup_deferral ();
9433 }
9434 break;
9435
9436 case EQ_EXPR:
9437 {
9438 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9439
9440 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9441 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9442 {
9443 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9444 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9445 do_jump
9446 (fold
9447 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9448 fold (build (EQ_EXPR, TREE_TYPE (exp),
9449 fold (build1 (REALPART_EXPR,
9450 TREE_TYPE (inner_type),
9451 exp0)),
9452 fold (build1 (REALPART_EXPR,
9453 TREE_TYPE (inner_type),
9454 exp1)))),
9455 fold (build (EQ_EXPR, TREE_TYPE (exp),
9456 fold (build1 (IMAGPART_EXPR,
9457 TREE_TYPE (inner_type),
9458 exp0)),
9459 fold (build1 (IMAGPART_EXPR,
9460 TREE_TYPE (inner_type),
9461 exp1)))))),
9462 if_false_label, if_true_label);
9463 }
9464
9465 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9466 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9467
9468 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9469 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9470 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9471 else
9472 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9473 break;
9474 }
9475
9476 case NE_EXPR:
9477 {
9478 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9479
9480 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9481 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9482 {
9483 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9484 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9485 do_jump
9486 (fold
9487 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9488 fold (build (NE_EXPR, TREE_TYPE (exp),
9489 fold (build1 (REALPART_EXPR,
9490 TREE_TYPE (inner_type),
9491 exp0)),
9492 fold (build1 (REALPART_EXPR,
9493 TREE_TYPE (inner_type),
9494 exp1)))),
9495 fold (build (NE_EXPR, TREE_TYPE (exp),
9496 fold (build1 (IMAGPART_EXPR,
9497 TREE_TYPE (inner_type),
9498 exp0)),
9499 fold (build1 (IMAGPART_EXPR,
9500 TREE_TYPE (inner_type),
9501 exp1)))))),
9502 if_false_label, if_true_label);
9503 }
9504
9505 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9506 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9507
9508 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9509 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9510 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9511 else
9512 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9513 break;
9514 }
9515
9516 case LT_EXPR:
9517 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9518 if (GET_MODE_CLASS (mode) == MODE_INT
9519 && ! can_compare_p (LT, mode, ccp_jump))
9520 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9521 else
9522 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9523 break;
9524
9525 case LE_EXPR:
9526 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9527 if (GET_MODE_CLASS (mode) == MODE_INT
9528 && ! can_compare_p (LE, mode, ccp_jump))
9529 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9530 else
9531 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9532 break;
9533
9534 case GT_EXPR:
9535 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9536 if (GET_MODE_CLASS (mode) == MODE_INT
9537 && ! can_compare_p (GT, mode, ccp_jump))
9538 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9539 else
9540 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9541 break;
9542
9543 case GE_EXPR:
9544 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9545 if (GET_MODE_CLASS (mode) == MODE_INT
9546 && ! can_compare_p (GE, mode, ccp_jump))
9547 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9548 else
9549 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9550 break;
9551
9552 case UNORDERED_EXPR:
9553 case ORDERED_EXPR:
9554 {
9555 enum rtx_code cmp, rcmp;
9556 int do_rev;
9557
9558 if (code == UNORDERED_EXPR)
9559 cmp = UNORDERED, rcmp = ORDERED;
9560 else
9561 cmp = ORDERED, rcmp = UNORDERED;
9562 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9563
9564 do_rev = 0;
9565 if (! can_compare_p (cmp, mode, ccp_jump)
9566 && (can_compare_p (rcmp, mode, ccp_jump)
9567 /* If the target doesn't provide either UNORDERED or ORDERED
9568 comparisons, canonicalize on UNORDERED for the library. */
9569 || rcmp == UNORDERED))
9570 do_rev = 1;
9571
9572 if (! do_rev)
9573 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9574 else
9575 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9576 }
9577 break;
9578
9579 {
9580 enum rtx_code rcode1;
9581 enum tree_code tcode2;
9582
9583 case UNLT_EXPR:
9584 rcode1 = UNLT;
9585 tcode2 = LT_EXPR;
9586 goto unordered_bcc;
9587 case UNLE_EXPR:
9588 rcode1 = UNLE;
9589 tcode2 = LE_EXPR;
9590 goto unordered_bcc;
9591 case UNGT_EXPR:
9592 rcode1 = UNGT;
9593 tcode2 = GT_EXPR;
9594 goto unordered_bcc;
9595 case UNGE_EXPR:
9596 rcode1 = UNGE;
9597 tcode2 = GE_EXPR;
9598 goto unordered_bcc;
9599 case UNEQ_EXPR:
9600 rcode1 = UNEQ;
9601 tcode2 = EQ_EXPR;
9602 goto unordered_bcc;
9603
9604 unordered_bcc:
9605 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9606 if (can_compare_p (rcode1, mode, ccp_jump))
9607 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9608 if_true_label);
9609 else
9610 {
9611 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9612 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9613 tree cmp0, cmp1;
9614
9615 /* If the target doesn't support combined unordered
9616 compares, decompose into UNORDERED + comparison. */
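/* For instance, UNLT_EXPR is rewritten here as
   UNORDERED (op0, op1) || op0 < op1.  */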
9617 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9618 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9619 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9620 do_jump (exp, if_false_label, if_true_label);
9621 }
9622 }
9623 break;
9624
9625 default:
9626 normal:
9627 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9628 #if 0
9629 /* This is not needed any more and causes poor code since it causes
9630 comparisons and tests from non-SI objects to have different code
9631 sequences. */
9632 /* Copy to register to avoid generating bad insns by cse
9633 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9634 if (!cse_not_expected && GET_CODE (temp) == MEM)
9635 temp = copy_to_reg (temp);
9636 #endif
9637 do_pending_stack_adjust ();
9638 /* Do any postincrements in the expression that was tested. */
9639 emit_queue ();
9640
9641 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9642 {
9643 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9644 if (target)
9645 emit_jump (target);
9646 }
9647 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9648 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9649 /* Note swapping the labels gives us not-equal. */
9650 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9651 else if (GET_MODE (temp) != VOIDmode)
9652 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9653 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9654 GET_MODE (temp), NULL_RTX, 0,
9655 if_false_label, if_true_label);
9656 else
9657 abort ();
9658 }
9659
9660 if (drop_through_label)
9661 {
9662 /* If do_jump produces code that might be jumped around,
9663 do any stack adjusts from that code, before the place
9664 where control merges in. */
9665 do_pending_stack_adjust ();
9666 emit_label (drop_through_label);
9667 }
9668 }
9669 \f
9670 /* Given a comparison expression EXP for values too wide to be compared
9671 with one insn, test the comparison and jump to the appropriate label.
9672 The code of EXP is ignored; we always test GT if SWAP is 0,
9673 and LT if SWAP is 1. */
9674
9675 static void
9676 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9677 tree exp;
9678 int swap;
9679 rtx if_false_label, if_true_label;
9680 {
9681 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9682 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9683 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9684 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9685
9686 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9687 }
9688
9689 /* Compare OP0 with OP1, word at a time, in mode MODE.
9690 UNSIGNEDP says to do unsigned comparison.
9691 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9692
9693 void
9694 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9695 enum machine_mode mode;
9696 int unsignedp;
9697 rtx op0, op1;
9698 rtx if_false_label, if_true_label;
9699 {
9700 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9701 rtx drop_through_label = 0;
9702 int i;
9703
9704 if (! if_true_label || ! if_false_label)
9705 drop_through_label = gen_label_rtx ();
9706 if (! if_true_label)
9707 if_true_label = drop_through_label;
9708 if (! if_false_label)
9709 if_false_label = drop_through_label;
9710
9711 /* Compare a word at a time, high order first. */
9712 for (i = 0; i < nwords; i++)
9713 {
9714 rtx op0_word, op1_word;
9715
9716 if (WORDS_BIG_ENDIAN)
9717 {
9718 op0_word = operand_subword_force (op0, i, mode);
9719 op1_word = operand_subword_force (op1, i, mode);
9720 }
9721 else
9722 {
9723 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9724 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9725 }
9726
9727 /* All but the high-order word must be compared as unsigned. */
9728 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9729 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9730 NULL_RTX, if_true_label);
9731
9732 /* Consider lower words only if these are equal. */
9733 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9734 NULL_RTX, 0, NULL_RTX, if_false_label);
9735 }
9736
9737 if (if_false_label)
9738 emit_jump (if_false_label);
9739 if (drop_through_label)
9740 emit_label (drop_through_label);
9741 }
9742
9743 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9744 with one insn, test the comparison and jump to the appropriate label. */
9745
9746 static void
9747 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9748 tree exp;
9749 rtx if_false_label, if_true_label;
9750 {
9751 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9752 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9753 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9754 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9755 int i;
9756 rtx drop_through_label = 0;
9757
9758 if (! if_false_label)
9759 drop_through_label = if_false_label = gen_label_rtx ();
9760
9761 for (i = 0; i < nwords; i++)
9762 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9763 operand_subword_force (op1, i, mode),
9764 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9765 word_mode, NULL_RTX, 0, if_false_label,
9766 NULL_RTX);
9767
9768 if (if_true_label)
9769 emit_jump (if_true_label);
9770 if (drop_through_label)
9771 emit_label (drop_through_label);
9772 }
9773 \f
9774 /* Jump according to whether OP0 is 0.
9775 We assume that OP0 has an integer mode that is too wide
9776 for the available compare insns. */
9777
9778 void
9779 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9780 rtx op0;
9781 rtx if_false_label, if_true_label;
9782 {
9783 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9784 rtx part;
9785 int i;
9786 rtx drop_through_label = 0;
9787
9788 /* The fastest way of doing this comparison on almost any machine is to
9789 "or" all the words and compare the result. If all have to be loaded
9790 from memory and this is a very wide item, it's possible this may
9791 be slower, but that's highly unlikely. */
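/* For example, on a 32-bit target a DImode value would be tested by OR-ing
   its two words into one register and comparing that register with zero.  */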
9792
9793 part = gen_reg_rtx (word_mode);
9794 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9795 for (i = 1; i < nwords && part != 0; i++)
9796 part = expand_binop (word_mode, ior_optab, part,
9797 operand_subword_force (op0, i, GET_MODE (op0)),
9798 part, 1, OPTAB_WIDEN);
9799
9800 if (part != 0)
9801 {
9802 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9803 NULL_RTX, 0, if_false_label, if_true_label);
9804
9805 return;
9806 }
9807
9808 /* If we couldn't do the "or" simply, do this with a series of compares. */
9809 if (! if_false_label)
9810 drop_through_label = if_false_label = gen_label_rtx ();
9811
9812 for (i = 0; i < nwords; i++)
9813 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9814 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9815 if_false_label, NULL_RTX);
9816
9817 if (if_true_label)
9818 emit_jump (if_true_label);
9819
9820 if (drop_through_label)
9821 emit_label (drop_through_label);
9822 }
9823 \f
9824 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9825 (including code to compute the values to be compared)
9826 and set (CC0) according to the result.
9827 The decision as to signed or unsigned comparison must be made by the caller.
9828
9829 We force a stack adjustment unless there are currently
9830 things pushed on the stack that aren't yet used.
9831
9832 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9833 compared.
9834
9835 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9836 size of MODE should be used. */
9837
9838 rtx
9839 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9840 register rtx op0, op1;
9841 enum rtx_code code;
9842 int unsignedp;
9843 enum machine_mode mode;
9844 rtx size;
9845 unsigned int align;
9846 {
9847 rtx tem;
9848
9849 /* If one operand is constant, make it the second one. Only do this
9850 if the other operand is not constant as well. */
9851
9852 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9853 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9854 {
9855 tem = op0;
9856 op0 = op1;
9857 op1 = tem;
9858 code = swap_condition (code);
9859 }
9860
9861 if (flag_force_mem)
9862 {
9863 op0 = force_not_mem (op0);
9864 op1 = force_not_mem (op1);
9865 }
9866
9867 do_pending_stack_adjust ();
9868
9869 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9870 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9871 return tem;
9872
9873 #if 0
9874 /* There's no need to do this now that combine.c can eliminate lots of
9875 sign extensions. This can be less efficient in certain cases on other
9876 machines. */
9877
9878 /* If this is a signed equality comparison, we can do it as an
9879 unsigned comparison since zero-extension is cheaper than sign
9880 extension and comparisons with zero are done as unsigned. This is
9881 the case even on machines that can do fast sign extension, since
9882 zero-extension is easier to combine with other operations than
9883 sign-extension is. If we are comparing against a constant, we must
9884 convert it to what it would look like unsigned. */
9885 if ((code == EQ || code == NE) && ! unsignedp
9886 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9887 {
9888 if (GET_CODE (op1) == CONST_INT
9889 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9890 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9891 unsignedp = 1;
9892 }
9893 #endif
9894
9895 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9896
9897 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9898 }
9899
9900 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9901 The decision as to signed or unsigned comparison must be made by the caller.
9902
9903 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9904 compared.
9905
9906 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9907 size of MODE should be used. */
9908
9909 void
9910 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9911 if_false_label, if_true_label)
9912 register rtx op0, op1;
9913 enum rtx_code code;
9914 int unsignedp;
9915 enum machine_mode mode;
9916 rtx size;
9917 unsigned int align;
9918 rtx if_false_label, if_true_label;
9919 {
9920 rtx tem;
9921 int dummy_true_label = 0;
9922
9923 /* Reverse the comparison if that is safe and we want to jump if it is
9924 false. */
9925 if (! if_true_label && ! FLOAT_MODE_P (mode))
9926 {
9927 if_true_label = if_false_label;
9928 if_false_label = 0;
9929 code = reverse_condition (code);
9930 }
9931
9932 /* If one operand is constant, make it the second one. Only do this
9933 if the other operand is not constant as well. */
9934
9935 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9936 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9937 {
9938 tem = op0;
9939 op0 = op1;
9940 op1 = tem;
9941 code = swap_condition (code);
9942 }
9943
9944 if (flag_force_mem)
9945 {
9946 op0 = force_not_mem (op0);
9947 op1 = force_not_mem (op1);
9948 }
9949
9950 do_pending_stack_adjust ();
9951
9952 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9953 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9954 {
9955 if (tem == const_true_rtx)
9956 {
9957 if (if_true_label)
9958 emit_jump (if_true_label);
9959 }
9960 else
9961 {
9962 if (if_false_label)
9963 emit_jump (if_false_label);
9964 }
9965 return;
9966 }
9967
9968 #if 0
9969 /* There's no need to do this now that combine.c can eliminate lots of
9970 sign extensions. This can be less efficient in certain cases on other
9971 machines. */
9972
9973 /* If this is a signed equality comparison, we can do it as an
9974 unsigned comparison since zero-extension is cheaper than sign
9975 extension and comparisons with zero are done as unsigned. This is
9976 the case even on machines that can do fast sign extension, since
9977 zero-extension is easier to combine with other operations than
9978 sign-extension is. If we are comparing against a constant, we must
9979 convert it to what it would look like unsigned. */
9980 if ((code == EQ || code == NE) && ! unsignedp
9981 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9982 {
9983 if (GET_CODE (op1) == CONST_INT
9984 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9985 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9986 unsignedp = 1;
9987 }
9988 #endif
9989
9990 if (! if_true_label)
9991 {
9992 dummy_true_label = 1;
9993 if_true_label = gen_label_rtx ();
9994 }
9995
9996 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9997 if_true_label);
9998
9999 if (if_false_label)
10000 emit_jump (if_false_label);
10001 if (dummy_true_label)
10002 emit_label (if_true_label);
10003 }
10004
10005 /* Generate code for a comparison expression EXP (including code to compute
10006 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10007 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10008 generated code will drop through.
10009 SIGNED_CODE should be the rtx operation for this comparison for
10010 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10011
10012 We force a stack adjustment unless there are currently
10013 things pushed on the stack that aren't yet used. */
10014
10015 static void
10016 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10017 if_true_label)
10018 register tree exp;
10019 enum rtx_code signed_code, unsigned_code;
10020 rtx if_false_label, if_true_label;
10021 {
10022 unsigned int align0, align1;
10023 register rtx op0, op1;
10024 register tree type;
10025 register enum machine_mode mode;
10026 int unsignedp;
10027 enum rtx_code code;
10028
10029 /* Don't crash if the comparison was erroneous. */
10030 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10031 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10032 return;
10033
10034 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10035 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10036 mode = TYPE_MODE (type);
10037 unsignedp = TREE_UNSIGNED (type);
10038 code = unsignedp ? unsigned_code : signed_code;
10039
10040 #ifdef HAVE_canonicalize_funcptr_for_compare
10041 /* If function pointers need to be "canonicalized" before they can
10042 be reliably compared, then canonicalize them. */
10043 if (HAVE_canonicalize_funcptr_for_compare
10044 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10045 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10046 == FUNCTION_TYPE))
10047 {
10048 rtx new_op0 = gen_reg_rtx (mode);
10049
10050 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10051 op0 = new_op0;
10052 }
10053
10054 if (HAVE_canonicalize_funcptr_for_compare
10055 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10056 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10057 == FUNCTION_TYPE))
10058 {
10059 rtx new_op1 = gen_reg_rtx (mode);
10060
10061 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10062 op1 = new_op1;
10063 }
10064 #endif
10065
10066 /* Do any postincrements in the expression that was tested. */
10067 emit_queue ();
10068
10069 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10070 ((mode == BLKmode)
10071 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10072 MIN (align0, align1),
10073 if_false_label, if_true_label);
10074 }
10075 \f
10076 /* Generate code to calculate EXP using a store-flag instruction
10077 and return an rtx for the result. EXP is either a comparison
10078 or a TRUTH_NOT_EXPR whose operand is a comparison.
10079
10080 If TARGET is nonzero, store the result there if convenient.
10081
10082 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10083 cheap.
10084
10085 Return zero if there is no suitable set-flag instruction
10086 available on this machine.
10087
10088 Once expand_expr has been called on the arguments of the comparison,
10089 we are committed to doing the store flag, since it is not safe to
10090 re-evaluate the expression. We emit the store-flag insn by calling
10091 emit_store_flag, but only expand the arguments if we have a reason
10092 to believe that emit_store_flag will be successful. If we think that
10093 it will, but it isn't, we have to simulate the store-flag with a
10094 set/jump/set sequence. */
10095
10096 static rtx
10097 do_store_flag (exp, target, mode, only_cheap)
10098 tree exp;
10099 rtx target;
10100 enum machine_mode mode;
10101 int only_cheap;
10102 {
10103 enum rtx_code code;
10104 tree arg0, arg1, type;
10105 tree tem;
10106 enum machine_mode operand_mode;
10107 int invert = 0;
10108 int unsignedp;
10109 rtx op0, op1;
10110 enum insn_code icode;
10111 rtx subtarget = target;
10112 rtx result, label;
10113
10114 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10115 result at the end. We can't simply invert the test since it would
10116 have already been inverted if it were valid. This case occurs for
10117 some floating-point comparisons. */
10118
10119 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10120 invert = 1, exp = TREE_OPERAND (exp, 0);
10121
10122 arg0 = TREE_OPERAND (exp, 0);
10123 arg1 = TREE_OPERAND (exp, 1);
10124 type = TREE_TYPE (arg0);
10125 operand_mode = TYPE_MODE (type);
10126 unsignedp = TREE_UNSIGNED (type);
10127
10128 /* We won't bother with BLKmode store-flag operations because it would mean
10129 passing a lot of information to emit_store_flag. */
10130 if (operand_mode == BLKmode)
10131 return 0;
10132
10133 /* We won't bother with store-flag operations involving function pointers
10134 when function pointers must be canonicalized before comparisons. */
10135 #ifdef HAVE_canonicalize_funcptr_for_compare
10136 if (HAVE_canonicalize_funcptr_for_compare
10137 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10138 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10139 == FUNCTION_TYPE))
10140 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10141 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10142 == FUNCTION_TYPE))))
10143 return 0;
10144 #endif
10145
10146 STRIP_NOPS (arg0);
10147 STRIP_NOPS (arg1);
10148
10149 /* Get the rtx comparison code to use. We know that EXP is a comparison
10150 operation of some type. Some comparisons against 1 and -1 can be
10151 converted to comparisons with zero. Do so here so that the tests
10152 below will be aware that we have a comparison with zero. These
10153 tests will not catch constants in the first operand, but constants
10154 are rarely passed as the first operand. */
10155
10156 switch (TREE_CODE (exp))
10157 {
10158 case EQ_EXPR:
10159 code = EQ;
10160 break;
10161 case NE_EXPR:
10162 code = NE;
10163 break;
10164 case LT_EXPR:
10165 if (integer_onep (arg1))
10166 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10167 else
10168 code = unsignedp ? LTU : LT;
10169 break;
10170 case LE_EXPR:
10171 if (! unsignedp && integer_all_onesp (arg1))
10172 arg1 = integer_zero_node, code = LT;
10173 else
10174 code = unsignedp ? LEU : LE;
10175 break;
10176 case GT_EXPR:
10177 if (! unsignedp && integer_all_onesp (arg1))
10178 arg1 = integer_zero_node, code = GE;
10179 else
10180 code = unsignedp ? GTU : GT;
10181 break;
10182 case GE_EXPR:
10183 if (integer_onep (arg1))
10184 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10185 else
10186 code = unsignedp ? GEU : GE;
10187 break;
10188
10189 case UNORDERED_EXPR:
10190 code = UNORDERED;
10191 break;
10192 case ORDERED_EXPR:
10193 code = ORDERED;
10194 break;
10195 case UNLT_EXPR:
10196 code = UNLT;
10197 break;
10198 case UNLE_EXPR:
10199 code = UNLE;
10200 break;
10201 case UNGT_EXPR:
10202 code = UNGT;
10203 break;
10204 case UNGE_EXPR:
10205 code = UNGE;
10206 break;
10207 case UNEQ_EXPR:
10208 code = UNEQ;
10209 break;
10210
10211 default:
10212 abort ();
10213 }
10214
10215 /* Put a constant second. */
10216 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10217 {
10218 tem = arg0; arg0 = arg1; arg1 = tem;
10219 code = swap_condition (code);
10220 }
10221
10222 /* If this is an equality or inequality test of a single bit, we can
10223 do this by shifting the bit being tested to the low-order bit and
10224 masking the result with the constant 1. If the condition was EQ,
10225 we xor it with 1. This does not require an scc insn and is faster
10226 than an scc insn even if we have it. */
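/* For example, `(x & 8) != 0' is computed here as `(x >> 3) & 1'; for the
   EQ case the shifted value is additionally XOR-ed with 1 before the
   masking.  */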
10227
10228 if ((code == NE || code == EQ)
10229 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10230 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10231 {
10232 tree inner = TREE_OPERAND (arg0, 0);
10233 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10234 int ops_unsignedp;
10235
10236 /* If INNER is a right shift of a constant and it plus BITNUM does
10237 not overflow, adjust BITNUM and INNER. */
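/* For example, a test of `((x >> 3) & 1) != 0' is adjusted here so that
   bit 3 of X itself is the bit tested.  */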
10238
10239 if (TREE_CODE (inner) == RSHIFT_EXPR
10240 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10241 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10242 && bitnum < TYPE_PRECISION (type)
10243 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10244 bitnum - TYPE_PRECISION (type)))
10245 {
10246 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10247 inner = TREE_OPERAND (inner, 0);
10248 }
10249
10250 /* If we are going to be able to omit the AND below, we must do our
10251 operations as unsigned. If we must use the AND, we have a choice.
10252 Normally unsigned is faster, but for some machines signed is. */
10253 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10254 #ifdef LOAD_EXTEND_OP
10255 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10256 #else
10257 : 1
10258 #endif
10259 );
10260
10261 if (subtarget == 0 || GET_CODE (subtarget) != REG
10262 || GET_MODE (subtarget) != operand_mode
10263 || ! safe_from_p (subtarget, inner, 1))
10264 subtarget = 0;
10265
10266 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10267
10268 if (bitnum != 0)
10269 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10270 size_int (bitnum), subtarget, ops_unsignedp);
10271
10272 if (GET_MODE (op0) != mode)
10273 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10274
10275 if ((code == EQ && ! invert) || (code == NE && invert))
10276 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10277 ops_unsignedp, OPTAB_LIB_WIDEN);
10278
10279 /* Put the AND last so it can combine with more things. */
10280 if (bitnum != TYPE_PRECISION (type) - 1)
10281 op0 = expand_and (op0, const1_rtx, subtarget);
10282
10283 return op0;
10284 }
10285
10286 /* Now see if we are likely to be able to do this. Return if not. */
10287 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10288 return 0;
10289
10290 icode = setcc_gen_code[(int) code];
10291 if (icode == CODE_FOR_nothing
10292 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10293 {
10294 /* We can only do this if it is one of the special cases that
10295 can be handled without an scc insn. */
10296 if ((code == LT && integer_zerop (arg1))
10297 || (! only_cheap && code == GE && integer_zerop (arg1)))
10298 ;
10299 else if (BRANCH_COST >= 0
10300 && ! only_cheap && (code == NE || code == EQ)
10301 && TREE_CODE (type) != REAL_TYPE
10302 && ((abs_optab->handlers[(int) operand_mode].insn_code
10303 != CODE_FOR_nothing)
10304 || (ffs_optab->handlers[(int) operand_mode].insn_code
10305 != CODE_FOR_nothing)))
10306 ;
10307 else
10308 return 0;
10309 }
10310
10311 preexpand_calls (exp);
10312 if (subtarget == 0 || GET_CODE (subtarget) != REG
10313 || GET_MODE (subtarget) != operand_mode
10314 || ! safe_from_p (subtarget, arg1, 1))
10315 subtarget = 0;
10316
10317 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10318 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10319
10320 if (target == 0)
10321 target = gen_reg_rtx (mode);
10322
10323 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10324 because, if emit_store_flag does anything, it will succeed and
10325 OP0 and OP1 will not be used subsequently. */
10326
10327 result = emit_store_flag (target, code,
10328 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10329 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10330 operand_mode, unsignedp, 1);
10331
10332 if (result)
10333 {
10334 if (invert)
10335 result = expand_binop (mode, xor_optab, result, const1_rtx,
10336 result, 0, OPTAB_LIB_WIDEN);
10337 return result;
10338 }
10339
10340 /* If this failed, we have to do this with set/compare/jump/set code. */
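/* In outline, the code emitted below is

       TARGET = 1;  (0 if INVERT)
       if (OP0 CODE OP1) goto label;
       TARGET = 0;  (1 if INVERT)
     label:

   so TARGET keeps its first value exactly when the comparison holds. */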
10341 if (GET_CODE (target) != REG
10342 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10343 target = gen_reg_rtx (GET_MODE (target));
10344
10345 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10346 result = compare_from_rtx (op0, op1, code, unsignedp,
10347 operand_mode, NULL_RTX, 0);
10348 if (GET_CODE (result) == CONST_INT)
10349 return (((result == const0_rtx && ! invert)
10350 || (result != const0_rtx && invert))
10351 ? const0_rtx : const1_rtx);
10352
10353 label = gen_label_rtx ();
10354 if (bcc_gen_fctn[(int) code] == 0)
10355 abort ();
10356
10357 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10358 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10359 emit_label (label);
10360
10361 return target;
10362 }
10363 \f
10364 /* Generate a tablejump instruction (used for switch statements). */
10365
10366 #ifdef HAVE_tablejump
10367
10368 /* INDEX is the value being switched on, with the lowest value
10369 in the table already subtracted.
10370 MODE is its expected mode (needed if INDEX is constant).
10371 RANGE is the length of the jump table.
10372 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10373
10374 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10375 index value is out of range. */
10376
10377 void
10378 do_tablejump (index, mode, range, table_label, default_label)
10379 rtx index, range, table_label, default_label;
10380 enum machine_mode mode;
10381 {
10382 register rtx temp, vector;
10383
10384 /* Do an unsigned comparison (in the proper mode) between the index
10385 expression and the value which represents the length of the range.
10386 Since we just finished subtracting the lower bound of the range
10387 from the index expression, this comparison allows us to simultaneously
10388 check that the original index expression value is both greater than
10389 or equal to the minimum value of the range and less than or equal to
10390 the maximum value of the range. */
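/* For example, if the case values run from 5 to 10, the caller has already
   computed INDEX = value - 5 and passes RANGE = 5.  A switch value of 3
   gives INDEX = (unsigned) -2, which compares above RANGE, so this single
   GTU test rejects values below the lower bound as well as values above
   the upper bound. */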
10391
10392 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10393 0, default_label);
10394
10395 /* If index is in range, it must fit in Pmode.
10396 Convert to Pmode so we can index with it. */
10397 if (mode != Pmode)
10398 index = convert_to_mode (Pmode, index, 1);
10399
10400 /* Don't let a MEM slip through, because then the INDEX that comes
10401 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10402 and break_out_memory_refs will go to work on it and mess it up. */
10403 #ifdef PIC_CASE_VECTOR_ADDRESS
10404 if (flag_pic && GET_CODE (index) != REG)
10405 index = copy_to_mode_reg (Pmode, index);
10406 #endif
10407
10408 /* If flag_force_addr were to affect this address
10409 it could interfere with the tricky assumptions made
10410 about addresses that contain label-refs,
10411 which may be valid only very near the tablejump itself. */
10412 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10413 GET_MODE_SIZE, because this indicates how large insns are. The other
10414 uses should all be Pmode, because they are addresses. This code
10415 could fail if addresses and insns are not the same size. */
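/* Compute the address of the table entry: TABLE_LABEL plus INDEX times
   the size of one CASE_VECTOR_MODE element. */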
10416 index = gen_rtx_PLUS (Pmode,
10417 gen_rtx_MULT (Pmode, index,
10418 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10419 gen_rtx_LABEL_REF (Pmode, table_label));
10420 #ifdef PIC_CASE_VECTOR_ADDRESS
10421 if (flag_pic)
10422 index = PIC_CASE_VECTOR_ADDRESS (index);
10423 else
10424 #endif
10425 index = memory_address_noforce (CASE_VECTOR_MODE, index);
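/* Load the selected table entry (a label address, or a PC-relative offset
   when the vector is PC-relative) into a fresh register.  The jump table
   never changes, so the MEM may be marked RTX_UNCHANGING_P. */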
10426 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10427 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10428 RTX_UNCHANGING_P (vector) = 1;
10429 convert_move (temp, vector, 0);
10430
10431 emit_jump_insn (gen_tablejump (temp, table_label));
10432
10433 /* If we are generating PIC code or if the table is PC-relative, the
10434 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10435 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10436 emit_barrier ();
10437 }
10438
10439 #endif /* HAVE_tablejump */
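/* Rough illustration only (a sketch of the intended expansion, not a
   guarantee of the exact RTL produced): for a switch statement such as

       switch (c)
         {
         case 5: ...   case 10: ...
         default: ...
         }

   do_tablejump is reached with INDEX = c - 5 and RANGE = 5, and the code
   above amounts to

       if ((unsigned) index > 5) goto default_label;
       temp = *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE));
       goto *temp;

   with the six label addresses (or PC-relative offsets) laid out as the
   dispatch table at TABLE_LABEL. */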