1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "defaults.h"
42 #include "toplev.h"
43 #include "ggc.h"
44 #include "tm_p.h"
45
46 #define CEIL(x,y) (((x) + (y) - 1) / (y))
47
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
50
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
53
54 #ifdef PUSH_ROUNDING
55
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first */
58 #endif
59
60 #endif
61
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
65 #else
66 #define STACK_PUSH_CODE PRE_INC
67 #endif
68 #endif
69
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
73 #endif
74
75 /* If this is nonzero, we do not bother generating VOLATILE
76 around volatile memory references, and we are willing to
77 output indirect addresses. If cse is to follow, we reject
78 indirect addresses so a useful potential cse is generated;
79 if it is used only once, instruction combination will produce
80 the same indirect address eventually. */
81 int cse_not_expected;
82
83 /* Nonzero to generate code for all the subroutines within an
84 expression before generating the upper levels of the expression.
85 Nowadays this is never zero. */
86 int do_preexpand_calls = 1;
87
88 /* Don't check memory usage, since code is being emitted to check memory
89 usage itself.  Used when current_function_check_memory_usage is true, to
90 avoid infinite recursion. */
91 static int in_check_memory_usage;
92
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
94 static tree placeholder_list = 0;
95
96 /* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98 struct move_by_pieces
99 {
100 rtx to;
101 rtx to_addr;
102 int autinc_to;
103 int explicit_inc_to;
104 int to_struct;
105 int to_readonly;
106 rtx from;
107 rtx from_addr;
108 int autinc_from;
109 int explicit_inc_from;
110 int from_struct;
111 int from_readonly;
112 int len;
113 int offset;
114 int reverse;
115 };
116
117 /* This structure is used by clear_by_pieces to describe the clear to
118 be performed. */
119
120 struct clear_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 int to_struct;
127 int len;
128 int offset;
129 int reverse;
130 };
131
132 extern struct obstack permanent_obstack;
133
134 static rtx get_push_address PROTO ((int));
135
136 static rtx enqueue_insn PROTO((rtx, rtx));
137 static int move_by_pieces_ninsns PROTO((unsigned int, int));
138 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static void clear_by_pieces PROTO((rtx, int, int));
141 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...),
142 enum machine_mode,
143 struct clear_by_pieces *));
144 static int is_zeros_p PROTO((tree));
145 static int mostly_zeros_p PROTO((tree));
146 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
147 tree, tree, int, int));
148 static void store_constructor PROTO((tree, rtx, int, int));
149 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
150 enum machine_mode, int, int,
151 int, int));
152 static enum memory_use_mode
153 get_memory_usage_from_modifier PROTO((enum expand_modifier));
154 static tree save_noncopied_parts PROTO((tree, tree));
155 static tree init_noncopied_parts PROTO((tree, tree));
156 static int safe_from_p PROTO((rtx, tree, int));
157 static int fixed_type_p PROTO((tree));
158 static rtx var_rtx PROTO((tree));
159 static int readonly_fields_p PROTO((tree));
160 static rtx expand_expr_unaligned PROTO((tree, int *));
161 static rtx expand_increment PROTO((tree, int, int));
162 static void preexpand_calls PROTO((tree));
163 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
164 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
165 static void do_compare_and_jump PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
166 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
167
168 /* Record for each mode whether we can move a register directly to or
169 from an object of that mode in memory. If we can't, we won't try
170 to use that mode directly when accessing a field of that mode. */
171
172 static char direct_load[NUM_MACHINE_MODES];
173 static char direct_store[NUM_MACHINE_MODES];
174
175 /* If a memory-to-memory move would take MOVE_RATIO or more simple
176 move-instruction sequences, we will do a movstr or libcall instead. */
177
178 #ifndef MOVE_RATIO
179 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
180 #define MOVE_RATIO 2
181 #else
182 /* If we are optimizing for space (-Os), cut down the default move ratio */
183 #define MOVE_RATIO (optimize_size ? 3 : 15)
184 #endif
185 #endif
186
187 /* This macro is used to determine whether move_by_pieces should be called
188 to perform a structure copy. */
189 #ifndef MOVE_BY_PIECES_P
190 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
191 (SIZE, ALIGN) < MOVE_RATIO)
192 #endif
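/* Illustrative sketch (editor's note, not part of the original source):
   how the MOVE_BY_PIECES_P decision plays out.  Assume a 32-bit target
   with MOVE_MAX == 4 and the default MOVE_RATIO of 15.  Copying a
   16-byte object known to be 4-byte aligned needs

       move_by_pieces_ninsns (16, 4) == 4      (four SImode moves)

   which is below MOVE_RATIO, so the copy is done inline piece by piece.
   A 200-byte copy would need 50 move insns, so it is handed to a movstrM
   pattern or a library call instead.  */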
193
194 /* This array records the insn_code of insns to perform block moves. */
195 enum insn_code movstr_optab[NUM_MACHINE_MODES];
196
197 /* This array records the insn_code of insns to perform block clears. */
198 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
199
200 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
201
202 #ifndef SLOW_UNALIGNED_ACCESS
203 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
204 #endif
205 \f
206 /* This is run once per compilation to set up which modes can be used
207 directly in memory and to initialize the block move optab. */
208
209 void
210 init_expr_once ()
211 {
212 rtx insn, pat;
213 enum machine_mode mode;
214 int num_clobbers;
215 rtx mem, mem1;
216 char *free_point;
217
218 start_sequence ();
219
220 /* Since we are on the permanent obstack, we must be sure we save this
221 spot AFTER we call start_sequence, since it will reuse the rtl it
222 makes. */
223 free_point = (char *) oballoc (0);
224
225 /* Try indexing by frame ptr and try by stack ptr.
226 It is known that on the Convex the stack ptr isn't a valid index.
227 With luck, one or the other is valid on any machine. */
228 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
229 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
230
231 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
232 pat = PATTERN (insn);
233
234 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
235 mode = (enum machine_mode) ((int) mode + 1))
236 {
237 int regno;
238 rtx reg;
239
240 direct_load[(int) mode] = direct_store[(int) mode] = 0;
241 PUT_MODE (mem, mode);
242 PUT_MODE (mem1, mode);
243
244 /* See if there is some register that can be used in this mode and
245 directly loaded or stored from memory. */
246
247 if (mode != VOIDmode && mode != BLKmode)
248 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
249 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
250 regno++)
251 {
252 if (! HARD_REGNO_MODE_OK (regno, mode))
253 continue;
254
255 reg = gen_rtx_REG (mode, regno);
256
257 SET_SRC (pat) = mem;
258 SET_DEST (pat) = reg;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_load[(int) mode] = 1;
261
262 SET_SRC (pat) = mem1;
263 SET_DEST (pat) = reg;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_load[(int) mode] = 1;
266
267 SET_SRC (pat) = reg;
268 SET_DEST (pat) = mem;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_store[(int) mode] = 1;
271
272 SET_SRC (pat) = reg;
273 SET_DEST (pat) = mem1;
274 if (recog (pat, insn, &num_clobbers) >= 0)
275 direct_store[(int) mode] = 1;
276 }
277 }
278
279 end_sequence ();
280 obfree (free_point);
281 }
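/* Illustrative note (editor's addition): the loop above effectively asks
   the backend, for each mode M and hard register R, whether insns of the
   form

       (set (reg:M R) (mem:M (reg SP)))       load, also tried with FP
       (set (mem:M (reg SP)) (reg:M R))       store, also tried with FP

   are recognized by recog.  direct_load[M] / direct_store[M] record the
   answer, so later code (e.g. convert_move and convert_modes) knows when
   a MEM of mode M can be used as an operand directly.  */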
282
283 /* This is run at the start of compiling a function. */
284
285 void
286 init_expr ()
287 {
288 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
289
290 pending_chain = 0;
291 pending_stack_adjust = 0;
292 inhibit_defer_pop = 0;
293 saveregs_value = 0;
294 apply_args_value = 0;
295 forced_labels = 0;
296 }
297
298 void
299 mark_expr_status (p)
300 struct expr_status *p;
301 {
302 if (p == NULL)
303 return;
304
305 ggc_mark_rtx (p->x_saveregs_value);
306 ggc_mark_rtx (p->x_apply_args_value);
307 ggc_mark_rtx (p->x_forced_labels);
308 }
309
310 void
311 free_expr_status (f)
312 struct function *f;
313 {
314 free (f->expr);
315 f->expr = NULL;
316 }
317
318 /* Small sanity check that the queue is empty at the end of a function. */
319 void
320 finish_expr_for_function ()
321 {
322 if (pending_chain)
323 abort ();
324 }
325 \f
326 /* Manage the queue of increment instructions to be output
327 for POSTINCREMENT_EXPR expressions, etc. */
328
329 /* Queue up to increment (or change) VAR later. BODY says how:
330 BODY should be the same thing you would pass to emit_insn
331 to increment right away. It will go to emit_insn later on.
332
333 The value is a QUEUED expression to be used in place of VAR
334 where you want to guarantee the pre-incrementation value of VAR. */
335
336 static rtx
337 enqueue_insn (var, body)
338 rtx var, body;
339 {
340 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
341 body, pending_chain);
342 return pending_chain;
343 }
344
345 /* Use protect_from_queue to convert a QUEUED expression
346 into something that you can put immediately into an instruction.
347 If the queued incrementation has not happened yet,
348 protect_from_queue returns the variable itself.
349 If the incrementation has happened, protect_from_queue returns a temp
350 that contains a copy of the old value of the variable.
351
352 Any time an rtx which might possibly be a QUEUED is to be put
353 into an instruction, it must be passed through protect_from_queue first.
354 QUEUED expressions are not meaningful in instructions.
355
356 Do not pass a value through protect_from_queue and then hold
357 on to it for a while before putting it in an instruction!
358 If the queue is flushed in between, incorrect code will result. */
359
360 rtx
361 protect_from_queue (x, modify)
362 register rtx x;
363 int modify;
364 {
365 register RTX_CODE code = GET_CODE (x);
366
367 #if 0 /* A QUEUED can hang around after the queue is forced out. */
368 /* Shortcut for most common case. */
369 if (pending_chain == 0)
370 return x;
371 #endif
372
373 if (code != QUEUED)
374 {
375 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
376 use of autoincrement. Make a copy of the contents of the memory
377 location rather than a copy of the address, but not if the value is
378 of mode BLKmode. Don't modify X in place since it might be
379 shared. */
380 if (code == MEM && GET_MODE (x) != BLKmode
381 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
382 {
383 register rtx y = XEXP (x, 0);
384 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
385
386 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
387 MEM_COPY_ATTRIBUTES (new, x);
388 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
389
390 if (QUEUED_INSN (y))
391 {
392 register rtx temp = gen_reg_rtx (GET_MODE (new));
393 emit_insn_before (gen_move_insn (temp, new),
394 QUEUED_INSN (y));
395 return temp;
396 }
397 return new;
398 }
399 /* Otherwise, recursively protect the subexpressions of all
400 the kinds of rtx's that can contain a QUEUED. */
401 if (code == MEM)
402 {
403 rtx tem = protect_from_queue (XEXP (x, 0), 0);
404 if (tem != XEXP (x, 0))
405 {
406 x = copy_rtx (x);
407 XEXP (x, 0) = tem;
408 }
409 }
410 else if (code == PLUS || code == MULT)
411 {
412 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
413 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
414 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
415 {
416 x = copy_rtx (x);
417 XEXP (x, 0) = new0;
418 XEXP (x, 1) = new1;
419 }
420 }
421 return x;
422 }
423 /* If the increment has not happened, use the variable itself. */
424 if (QUEUED_INSN (x) == 0)
425 return QUEUED_VAR (x);
426 /* If the increment has happened and a pre-increment copy exists,
427 use that copy. */
428 if (QUEUED_COPY (x) != 0)
429 return QUEUED_COPY (x);
430 /* The increment has happened but we haven't set up a pre-increment copy.
431 Set one up now, and use it. */
432 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
433 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
434 QUEUED_INSN (x));
435 return QUEUED_COPY (x);
436 }
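/* Illustrative usage sketch (editor's addition; the identifiers below are
   hypothetical, the real callers include expand_increment and expand_expr).
   Expanding something like `a[i++]' via the queue goes roughly:

       rtx i_rtx  = ...;                                  rtx for I
       rtx queued = enqueue_insn (i_rtx, increment_body); defer the I++
       rtx index  = protect_from_queue (queued, 0);       pre-increment value
       ... use INDEX to form the address of a[i] ...
       emit_queue ();                                     now the I++ is emitted

   protect_from_queue must be called just before the value is put into an
   insn; holding its result across an emit_queue call is unsafe, as the
   comment above warns.  */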
437
438 /* Return nonzero if X contains a QUEUED expression:
439 if it contains anything that will be altered by a queued increment.
440 We handle only combinations of MEM, PLUS, MINUS and MULT operators
441 since memory addresses generally contain only those. */
442
443 int
444 queued_subexp_p (x)
445 rtx x;
446 {
447 register enum rtx_code code = GET_CODE (x);
448 switch (code)
449 {
450 case QUEUED:
451 return 1;
452 case MEM:
453 return queued_subexp_p (XEXP (x, 0));
454 case MULT:
455 case PLUS:
456 case MINUS:
457 return (queued_subexp_p (XEXP (x, 0))
458 || queued_subexp_p (XEXP (x, 1)));
459 default:
460 return 0;
461 }
462 }
463
464 /* Perform all the pending incrementations. */
465
466 void
467 emit_queue ()
468 {
469 register rtx p;
470 while ((p = pending_chain))
471 {
472 rtx body = QUEUED_BODY (p);
473
474 if (GET_CODE (body) == SEQUENCE)
475 {
476 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
477 emit_insn (QUEUED_BODY (p));
478 }
479 else
480 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
481 pending_chain = QUEUED_NEXT (p);
482 }
483 }
484 \f
485 /* Copy data from FROM to TO, where the machine modes are not the same.
486 Both modes may be integer, or both may be floating.
487 UNSIGNEDP should be nonzero if FROM is an unsigned type.
488 This causes zero-extension instead of sign-extension. */
489
490 void
491 convert_move (to, from, unsignedp)
492 register rtx to, from;
493 int unsignedp;
494 {
495 enum machine_mode to_mode = GET_MODE (to);
496 enum machine_mode from_mode = GET_MODE (from);
497 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
498 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
499 enum insn_code code;
500 rtx libcall;
501
502 /* rtx code for making an equivalent value. */
503 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
504
505 to = protect_from_queue (to, 1);
506 from = protect_from_queue (from, 0);
507
508 if (to_real != from_real)
509 abort ();
510
511 /* If FROM is a SUBREG that indicates that we have already done at least
512 the required extension, strip it. We don't handle such SUBREGs as
513 TO here. */
514
515 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
516 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
517 >= GET_MODE_SIZE (to_mode))
518 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
519 from = gen_lowpart (to_mode, from), from_mode = to_mode;
520
521 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
522 abort ();
523
524 if (to_mode == from_mode
525 || (from_mode == VOIDmode && CONSTANT_P (from)))
526 {
527 emit_move_insn (to, from);
528 return;
529 }
530
531 if (to_real)
532 {
533 rtx value;
534
535 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
536 {
537 /* Try converting directly if the insn is supported. */
538 if ((code = can_extend_p (to_mode, from_mode, 0))
539 != CODE_FOR_nothing)
540 {
541 emit_unop_insn (code, to, from, UNKNOWN);
542 return;
543 }
544 }
545
546 #ifdef HAVE_trunchfqf2
547 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
548 {
549 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
550 return;
551 }
552 #endif
553 #ifdef HAVE_trunctqfqf2
554 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
555 {
556 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
557 return;
558 }
559 #endif
560 #ifdef HAVE_truncsfqf2
561 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
562 {
563 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
564 return;
565 }
566 #endif
567 #ifdef HAVE_truncdfqf2
568 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
569 {
570 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
571 return;
572 }
573 #endif
574 #ifdef HAVE_truncxfqf2
575 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
576 {
577 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
578 return;
579 }
580 #endif
581 #ifdef HAVE_trunctfqf2
582 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
583 {
584 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
585 return;
586 }
587 #endif
588
589 #ifdef HAVE_trunctqfhf2
590 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
591 {
592 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
593 return;
594 }
595 #endif
596 #ifdef HAVE_truncsfhf2
597 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
598 {
599 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
600 return;
601 }
602 #endif
603 #ifdef HAVE_truncdfhf2
604 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
605 {
606 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
607 return;
608 }
609 #endif
610 #ifdef HAVE_truncxfhf2
611 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
612 {
613 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
614 return;
615 }
616 #endif
617 #ifdef HAVE_trunctfhf2
618 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
619 {
620 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
621 return;
622 }
623 #endif
624
625 #ifdef HAVE_truncsftqf2
626 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
627 {
628 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
629 return;
630 }
631 #endif
632 #ifdef HAVE_truncdftqf2
633 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
634 {
635 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
636 return;
637 }
638 #endif
639 #ifdef HAVE_truncxftqf2
640 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
641 {
642 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
643 return;
644 }
645 #endif
646 #ifdef HAVE_trunctftqf2
647 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
648 {
649 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
650 return;
651 }
652 #endif
653
654 #ifdef HAVE_truncdfsf2
655 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
656 {
657 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
658 return;
659 }
660 #endif
661 #ifdef HAVE_truncxfsf2
662 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
663 {
664 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
665 return;
666 }
667 #endif
668 #ifdef HAVE_trunctfsf2
669 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
670 {
671 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
672 return;
673 }
674 #endif
675 #ifdef HAVE_truncxfdf2
676 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
677 {
678 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
679 return;
680 }
681 #endif
682 #ifdef HAVE_trunctfdf2
683 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
684 {
685 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
686 return;
687 }
688 #endif
689
690 libcall = (rtx) 0;
691 switch (from_mode)
692 {
693 case SFmode:
694 switch (to_mode)
695 {
696 case DFmode:
697 libcall = extendsfdf2_libfunc;
698 break;
699
700 case XFmode:
701 libcall = extendsfxf2_libfunc;
702 break;
703
704 case TFmode:
705 libcall = extendsftf2_libfunc;
706 break;
707
708 default:
709 break;
710 }
711 break;
712
713 case DFmode:
714 switch (to_mode)
715 {
716 case SFmode:
717 libcall = truncdfsf2_libfunc;
718 break;
719
720 case XFmode:
721 libcall = extenddfxf2_libfunc;
722 break;
723
724 case TFmode:
725 libcall = extenddftf2_libfunc;
726 break;
727
728 default:
729 break;
730 }
731 break;
732
733 case XFmode:
734 switch (to_mode)
735 {
736 case SFmode:
737 libcall = truncxfsf2_libfunc;
738 break;
739
740 case DFmode:
741 libcall = truncxfdf2_libfunc;
742 break;
743
744 default:
745 break;
746 }
747 break;
748
749 case TFmode:
750 switch (to_mode)
751 {
752 case SFmode:
753 libcall = trunctfsf2_libfunc;
754 break;
755
756 case DFmode:
757 libcall = trunctfdf2_libfunc;
758 break;
759
760 default:
761 break;
762 }
763 break;
764
765 default:
766 break;
767 }
768
769 if (libcall == (rtx) 0)
770 /* This conversion is not implemented yet. */
771 abort ();
772
773 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
774 1, from, from_mode);
775 emit_move_insn (to, value);
776 return;
777 }
778
779 /* Now both modes are integers. */
780
781 /* Handle expanding beyond a word. */
782 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
783 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
784 {
785 rtx insns;
786 rtx lowpart;
787 rtx fill_value;
788 rtx lowfrom;
789 int i;
790 enum machine_mode lowpart_mode;
791 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
792
793 /* Try converting directly if the insn is supported. */
794 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
795 != CODE_FOR_nothing)
796 {
797 /* If FROM is a SUBREG, put it into a register. Do this
798 so that we always generate the same set of insns for
799 better cse'ing; if an intermediate assignment occurred,
800 we won't be doing the operation directly on the SUBREG. */
801 if (optimize > 0 && GET_CODE (from) == SUBREG)
802 from = force_reg (from_mode, from);
803 emit_unop_insn (code, to, from, equiv_code);
804 return;
805 }
806 /* Next, try converting via full word. */
807 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
808 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
809 != CODE_FOR_nothing))
810 {
811 if (GET_CODE (to) == REG)
812 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
813 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
814 emit_unop_insn (code, to,
815 gen_lowpart (word_mode, to), equiv_code);
816 return;
817 }
818
819 /* No special multiword conversion insn; do it by hand. */
820 start_sequence ();
821
822 /* Since we will turn this into a no conflict block, we must ensure
823 that the source does not overlap the target. */
824
825 if (reg_overlap_mentioned_p (to, from))
826 from = force_reg (from_mode, from);
827
828 /* Get a copy of FROM widened to a word, if necessary. */
829 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
830 lowpart_mode = word_mode;
831 else
832 lowpart_mode = from_mode;
833
834 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
835
836 lowpart = gen_lowpart (lowpart_mode, to);
837 emit_move_insn (lowpart, lowfrom);
838
839 /* Compute the value to put in each remaining word. */
840 if (unsignedp)
841 fill_value = const0_rtx;
842 else
843 {
844 #ifdef HAVE_slt
845 if (HAVE_slt
846 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
847 && STORE_FLAG_VALUE == -1)
848 {
849 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
850 lowpart_mode, 0, 0);
851 fill_value = gen_reg_rtx (word_mode);
852 emit_insn (gen_slt (fill_value));
853 }
854 else
855 #endif
856 {
857 fill_value
858 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
859 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
860 NULL_RTX, 0);
861 fill_value = convert_to_mode (word_mode, fill_value, 1);
862 }
863 }
864
865 /* Fill the remaining words. */
866 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
867 {
868 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
869 rtx subword = operand_subword (to, index, 1, to_mode);
870
871 if (subword == 0)
872 abort ();
873
874 if (fill_value != subword)
875 emit_move_insn (subword, fill_value);
876 }
877
878 insns = get_insns ();
879 end_sequence ();
880
881 emit_no_conflict_block (insns, to, from, NULL_RTX,
882 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
883 return;
884 }
885
886 /* Truncating multi-word to a word or less. */
887 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
888 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
889 {
890 if (!((GET_CODE (from) == MEM
891 && ! MEM_VOLATILE_P (from)
892 && direct_load[(int) to_mode]
893 && ! mode_dependent_address_p (XEXP (from, 0)))
894 || GET_CODE (from) == REG
895 || GET_CODE (from) == SUBREG))
896 from = force_reg (from_mode, from);
897 convert_move (to, gen_lowpart (word_mode, from), 0);
898 return;
899 }
900
901 /* Handle pointer conversion */ /* SPEE 900220 */
902 if (to_mode == PQImode)
903 {
904 if (from_mode != QImode)
905 from = convert_to_mode (QImode, from, unsignedp);
906
907 #ifdef HAVE_truncqipqi2
908 if (HAVE_truncqipqi2)
909 {
910 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
911 return;
912 }
913 #endif /* HAVE_truncqipqi2 */
914 abort ();
915 }
916
917 if (from_mode == PQImode)
918 {
919 if (to_mode != QImode)
920 {
921 from = convert_to_mode (QImode, from, unsignedp);
922 from_mode = QImode;
923 }
924 else
925 {
926 #ifdef HAVE_extendpqiqi2
927 if (HAVE_extendpqiqi2)
928 {
929 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
930 return;
931 }
932 #endif /* HAVE_extendpqiqi2 */
933 abort ();
934 }
935 }
936
937 if (to_mode == PSImode)
938 {
939 if (from_mode != SImode)
940 from = convert_to_mode (SImode, from, unsignedp);
941
942 #ifdef HAVE_truncsipsi2
943 if (HAVE_truncsipsi2)
944 {
945 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
946 return;
947 }
948 #endif /* HAVE_truncsipsi2 */
949 abort ();
950 }
951
952 if (from_mode == PSImode)
953 {
954 if (to_mode != SImode)
955 {
956 from = convert_to_mode (SImode, from, unsignedp);
957 from_mode = SImode;
958 }
959 else
960 {
961 #ifdef HAVE_extendpsisi2
962 if (HAVE_extendpsisi2)
963 {
964 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
965 return;
966 }
967 #endif /* HAVE_extendpsisi2 */
968 abort ();
969 }
970 }
971
972 if (to_mode == PDImode)
973 {
974 if (from_mode != DImode)
975 from = convert_to_mode (DImode, from, unsignedp);
976
977 #ifdef HAVE_truncdipdi2
978 if (HAVE_truncdipdi2)
979 {
980 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
981 return;
982 }
983 #endif /* HAVE_truncdipdi2 */
984 abort ();
985 }
986
987 if (from_mode == PDImode)
988 {
989 if (to_mode != DImode)
990 {
991 from = convert_to_mode (DImode, from, unsignedp);
992 from_mode = DImode;
993 }
994 else
995 {
996 #ifdef HAVE_extendpdidi2
997 if (HAVE_extendpdidi2)
998 {
999 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1000 return;
1001 }
1002 #endif /* HAVE_extendpdidi2 */
1003 abort ();
1004 }
1005 }
1006
1007 /* Now follow all the conversions between integers
1008 no more than a word long. */
1009
1010 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1011 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1012 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1013 GET_MODE_BITSIZE (from_mode)))
1014 {
1015 if (!((GET_CODE (from) == MEM
1016 && ! MEM_VOLATILE_P (from)
1017 && direct_load[(int) to_mode]
1018 && ! mode_dependent_address_p (XEXP (from, 0)))
1019 || GET_CODE (from) == REG
1020 || GET_CODE (from) == SUBREG))
1021 from = force_reg (from_mode, from);
1022 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1023 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1024 from = copy_to_reg (from);
1025 emit_move_insn (to, gen_lowpart (to_mode, from));
1026 return;
1027 }
1028
1029 /* Handle extension. */
1030 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1031 {
1032 /* Convert directly if that works. */
1033 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1034 != CODE_FOR_nothing)
1035 {
1036 emit_unop_insn (code, to, from, equiv_code);
1037 return;
1038 }
1039 else
1040 {
1041 enum machine_mode intermediate;
1042 rtx tmp;
1043 tree shift_amount;
1044
1045 /* Search for a mode to convert via. */
1046 for (intermediate = from_mode; intermediate != VOIDmode;
1047 intermediate = GET_MODE_WIDER_MODE (intermediate))
1048 if (((can_extend_p (to_mode, intermediate, unsignedp)
1049 != CODE_FOR_nothing)
1050 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1051 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1052 GET_MODE_BITSIZE (intermediate))))
1053 && (can_extend_p (intermediate, from_mode, unsignedp)
1054 != CODE_FOR_nothing))
1055 {
1056 convert_move (to, convert_to_mode (intermediate, from,
1057 unsignedp), unsignedp);
1058 return;
1059 }
1060
1061 /* No suitable intermediate mode.
1062 Generate what we need with shifts. */
1063 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1064 - GET_MODE_BITSIZE (from_mode), 0);
1065 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1066 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1067 to, unsignedp);
1068 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1069 to, unsignedp);
1070 if (tmp != to)
1071 emit_move_insn (to, tmp);
1072 return;
1073 }
1074 }
1075
1076 /* Support special truncate insns for certain modes. */
1077
1078 if (from_mode == DImode && to_mode == SImode)
1079 {
1080 #ifdef HAVE_truncdisi2
1081 if (HAVE_truncdisi2)
1082 {
1083 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1084 return;
1085 }
1086 #endif
1087 convert_move (to, force_reg (from_mode, from), unsignedp);
1088 return;
1089 }
1090
1091 if (from_mode == DImode && to_mode == HImode)
1092 {
1093 #ifdef HAVE_truncdihi2
1094 if (HAVE_truncdihi2)
1095 {
1096 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1097 return;
1098 }
1099 #endif
1100 convert_move (to, force_reg (from_mode, from), unsignedp);
1101 return;
1102 }
1103
1104 if (from_mode == DImode && to_mode == QImode)
1105 {
1106 #ifdef HAVE_truncdiqi2
1107 if (HAVE_truncdiqi2)
1108 {
1109 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1110 return;
1111 }
1112 #endif
1113 convert_move (to, force_reg (from_mode, from), unsignedp);
1114 return;
1115 }
1116
1117 if (from_mode == SImode && to_mode == HImode)
1118 {
1119 #ifdef HAVE_truncsihi2
1120 if (HAVE_truncsihi2)
1121 {
1122 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1123 return;
1124 }
1125 #endif
1126 convert_move (to, force_reg (from_mode, from), unsignedp);
1127 return;
1128 }
1129
1130 if (from_mode == SImode && to_mode == QImode)
1131 {
1132 #ifdef HAVE_truncsiqi2
1133 if (HAVE_truncsiqi2)
1134 {
1135 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1136 return;
1137 }
1138 #endif
1139 convert_move (to, force_reg (from_mode, from), unsignedp);
1140 return;
1141 }
1142
1143 if (from_mode == HImode && to_mode == QImode)
1144 {
1145 #ifdef HAVE_trunchiqi2
1146 if (HAVE_trunchiqi2)
1147 {
1148 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1149 return;
1150 }
1151 #endif
1152 convert_move (to, force_reg (from_mode, from), unsignedp);
1153 return;
1154 }
1155
1156 if (from_mode == TImode && to_mode == DImode)
1157 {
1158 #ifdef HAVE_trunctidi2
1159 if (HAVE_trunctidi2)
1160 {
1161 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1162 return;
1163 }
1164 #endif
1165 convert_move (to, force_reg (from_mode, from), unsignedp);
1166 return;
1167 }
1168
1169 if (from_mode == TImode && to_mode == SImode)
1170 {
1171 #ifdef HAVE_trunctisi2
1172 if (HAVE_trunctisi2)
1173 {
1174 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1175 return;
1176 }
1177 #endif
1178 convert_move (to, force_reg (from_mode, from), unsignedp);
1179 return;
1180 }
1181
1182 if (from_mode == TImode && to_mode == HImode)
1183 {
1184 #ifdef HAVE_trunctihi2
1185 if (HAVE_trunctihi2)
1186 {
1187 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1188 return;
1189 }
1190 #endif
1191 convert_move (to, force_reg (from_mode, from), unsignedp);
1192 return;
1193 }
1194
1195 if (from_mode == TImode && to_mode == QImode)
1196 {
1197 #ifdef HAVE_trunctiqi2
1198 if (HAVE_trunctiqi2)
1199 {
1200 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1201 return;
1202 }
1203 #endif
1204 convert_move (to, force_reg (from_mode, from), unsignedp);
1205 return;
1206 }
1207
1208 /* Handle truncation of volatile memrefs, and so on;
1209 the things that couldn't be truncated directly,
1210 and for which there was no special instruction. */
1211 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1212 {
1213 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1214 emit_move_insn (to, temp);
1215 return;
1216 }
1217
1218 /* Mode combination is not recognized. */
1219 abort ();
1220 }
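/* Illustrative note (editor's addition): a typical path through
   convert_move.  Widening a QImode value into an SImode register with
   UNSIGNEDP == 0 first tries a direct extendqisi2 insn; if the backend
   lacks one, it searches for an intermediate mode (e.g. HImode) that can
   be both reached from QImode and extended to SImode; failing that, it
   falls back on a left shift by 24 bits followed by an arithmetic right
   shift by 24 bits in SImode.  The bit counts assume the usual 8/16/32-bit
   QImode/HImode/SImode layout.  */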
1221
1222 /* Return an rtx for a value that would result
1223 from converting X to mode MODE.
1224 Both X and MODE may be floating, or both integer.
1225 UNSIGNEDP is nonzero if X is an unsigned value.
1226 This can be done by referring to a part of X in place
1227 or by copying to a new temporary with conversion.
1228
1229 This function *must not* call protect_from_queue
1230 except when putting X into an insn (in which case convert_move does it). */
1231
1232 rtx
1233 convert_to_mode (mode, x, unsignedp)
1234 enum machine_mode mode;
1235 rtx x;
1236 int unsignedp;
1237 {
1238 return convert_modes (mode, VOIDmode, x, unsignedp);
1239 }
1240
1241 /* Return an rtx for a value that would result
1242 from converting X from mode OLDMODE to mode MODE.
1243 Both modes may be floating, or both integer.
1244 UNSIGNEDP is nonzero if X is an unsigned value.
1245
1246 This can be done by referring to a part of X in place
1247 or by copying to a new temporary with conversion.
1248
1249 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1250
1251 This function *must not* call protect_from_queue
1252 except when putting X into an insn (in which case convert_move does it). */
1253
1254 rtx
1255 convert_modes (mode, oldmode, x, unsignedp)
1256 enum machine_mode mode, oldmode;
1257 rtx x;
1258 int unsignedp;
1259 {
1260 register rtx temp;
1261
1262 /* If FROM is a SUBREG that indicates that we have already done at least
1263 the required extension, strip it. */
1264
1265 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1266 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1267 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1268 x = gen_lowpart (mode, x);
1269
1270 if (GET_MODE (x) != VOIDmode)
1271 oldmode = GET_MODE (x);
1272
1273 if (mode == oldmode)
1274 return x;
1275
1276 /* There is one case that we must handle specially: If we are converting
1277 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1278 we are to interpret the constant as unsigned, gen_lowpart will do
1279 the wrong thing if the constant appears negative. What we want to do is
1280 make the high-order word of the constant zero, not all ones. */
1281
1282 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1283 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1284 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1285 {
1286 HOST_WIDE_INT val = INTVAL (x);
1287
1288 if (oldmode != VOIDmode
1289 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1290 {
1291 int width = GET_MODE_BITSIZE (oldmode);
1292
1293 /* We need to zero extend VAL. */
1294 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1295 }
1296
1297 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1298 }
1299
1300 /* We can do this with a gen_lowpart if both desired and current modes
1301 are integer, and this is either a constant integer, a register, or a
1302 non-volatile MEM. Except for the constant case where MODE is no
1303 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1304
1305 if ((GET_CODE (x) == CONST_INT
1306 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1307 || (GET_MODE_CLASS (mode) == MODE_INT
1308 && GET_MODE_CLASS (oldmode) == MODE_INT
1309 && (GET_CODE (x) == CONST_DOUBLE
1310 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1311 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1312 && direct_load[(int) mode])
1313 || (GET_CODE (x) == REG
1314 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1315 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1316 {
1317 /* ?? If we don't know OLDMODE, we have to assume here that
1318 X does not need sign- or zero-extension. This may not be
1319 the case, but it's the best we can do. */
1320 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1321 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1322 {
1323 HOST_WIDE_INT val = INTVAL (x);
1324 int width = GET_MODE_BITSIZE (oldmode);
1325
1326 /* We must sign or zero-extend in this case. Start by
1327 zero-extending, then sign extend if we need to. */
1328 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1329 if (! unsignedp
1330 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1331 val |= (HOST_WIDE_INT) (-1) << width;
1332
1333 return GEN_INT (val);
1334 }
1335
1336 return gen_lowpart (mode, x);
1337 }
1338
1339 temp = gen_reg_rtx (mode);
1340 convert_move (temp, x, unsignedp);
1341 return temp;
1342 }
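/* Worked example (editor's addition) of the CONST_INT handling above,
   assuming the caller passes OLDMODE == QImode since a CONST_INT itself
   has VOIDmode: converting (const_int -1) to HImode with UNSIGNEDP
   nonzero must zero-extend, so the value is masked to 0xff and
   GEN_INT (0xff) is returned; with UNSIGNEDP zero the sign bit (0x80) is
   propagated and the original -1 comes back.  */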
1343 \f
1344
1345 /* This macro is used to determine what the largest unit size that
1346 move_by_pieces can use is. */
1347
1348 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1349 move efficiently, as opposed to MOVE_MAX which is the maximum
1350 number of bytes we can move with a single instruction. */
1351
1352 #ifndef MOVE_MAX_PIECES
1353 #define MOVE_MAX_PIECES MOVE_MAX
1354 #endif
1355
1356 /* Generate several move instructions to copy LEN bytes
1357 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1358 The caller must pass FROM and TO
1359 through protect_from_queue before calling.
1360 ALIGN (in bytes) is maximum alignment we can assume. */
1361
1362 void
1363 move_by_pieces (to, from, len, align)
1364 rtx to, from;
1365 int len, align;
1366 {
1367 struct move_by_pieces data;
1368 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1369 int max_size = MOVE_MAX_PIECES + 1;
1370 enum machine_mode mode = VOIDmode, tmode;
1371 enum insn_code icode;
1372
1373 data.offset = 0;
1374 data.to_addr = to_addr;
1375 data.from_addr = from_addr;
1376 data.to = to;
1377 data.from = from;
1378 data.autinc_to
1379 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1380 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1381 data.autinc_from
1382 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1383 || GET_CODE (from_addr) == POST_INC
1384 || GET_CODE (from_addr) == POST_DEC);
1385
1386 data.explicit_inc_from = 0;
1387 data.explicit_inc_to = 0;
1388 data.reverse
1389 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1390 if (data.reverse) data.offset = len;
1391 data.len = len;
1392
1393 data.to_struct = MEM_IN_STRUCT_P (to);
1394 data.from_struct = MEM_IN_STRUCT_P (from);
1395 data.to_readonly = RTX_UNCHANGING_P (to);
1396 data.from_readonly = RTX_UNCHANGING_P (from);
1397
1398 /* If copying requires more than two move insns,
1399 copy addresses to registers (to make displacements shorter)
1400 and use post-increment if available. */
1401 if (!(data.autinc_from && data.autinc_to)
1402 && move_by_pieces_ninsns (len, align) > 2)
1403 {
1404 /* Find the mode of the largest move... */
1405 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1406 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1407 if (GET_MODE_SIZE (tmode) < max_size)
1408 mode = tmode;
1409
1410 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1411 {
1412 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1413 data.autinc_from = 1;
1414 data.explicit_inc_from = -1;
1415 }
1416 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1417 {
1418 data.from_addr = copy_addr_to_reg (from_addr);
1419 data.autinc_from = 1;
1420 data.explicit_inc_from = 1;
1421 }
1422 if (!data.autinc_from && CONSTANT_P (from_addr))
1423 data.from_addr = copy_addr_to_reg (from_addr);
1424 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1425 {
1426 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1427 data.autinc_to = 1;
1428 data.explicit_inc_to = -1;
1429 }
1430 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1431 {
1432 data.to_addr = copy_addr_to_reg (to_addr);
1433 data.autinc_to = 1;
1434 data.explicit_inc_to = 1;
1435 }
1436 if (!data.autinc_to && CONSTANT_P (to_addr))
1437 data.to_addr = copy_addr_to_reg (to_addr);
1438 }
1439
1440 if (! SLOW_UNALIGNED_ACCESS
1441 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1442 align = MOVE_MAX;
1443
1444 /* First move what we can in the largest integer mode, then go to
1445 successively smaller modes. */
1446
1447 while (max_size > 1)
1448 {
1449 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1450 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1451 if (GET_MODE_SIZE (tmode) < max_size)
1452 mode = tmode;
1453
1454 if (mode == VOIDmode)
1455 break;
1456
1457 icode = mov_optab->handlers[(int) mode].insn_code;
1458 if (icode != CODE_FOR_nothing
1459 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1460 GET_MODE_SIZE (mode)))
1461 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1462
1463 max_size = GET_MODE_SIZE (mode);
1464 }
1465
1466 /* The code above should have handled everything. */
1467 if (data.len > 0)
1468 abort ();
1469 }
1470
1471 /* Return number of insns required to move L bytes by pieces.
1472 ALIGN (in bytes) is maximum alignment we can assume. */
1473
1474 static int
1475 move_by_pieces_ninsns (l, align)
1476 unsigned int l;
1477 int align;
1478 {
1479 register int n_insns = 0;
1480 int max_size = MOVE_MAX + 1;
1481
1482 if (! SLOW_UNALIGNED_ACCESS
1483 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1484 align = MOVE_MAX;
1485
1486 while (max_size > 1)
1487 {
1488 enum machine_mode mode = VOIDmode, tmode;
1489 enum insn_code icode;
1490
1491 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1492 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1493 if (GET_MODE_SIZE (tmode) < max_size)
1494 mode = tmode;
1495
1496 if (mode == VOIDmode)
1497 break;
1498
1499 icode = mov_optab->handlers[(int) mode].insn_code;
1500 if (icode != CODE_FOR_nothing
1501 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1502 GET_MODE_SIZE (mode)))
1503 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1504
1505 max_size = GET_MODE_SIZE (mode);
1506 }
1507
1508 return n_insns;
1509 }
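/* Worked example (editor's addition), assuming MOVE_MAX == 4 and that the
   target provides SImode, HImode and QImode move patterns: with 4-byte
   alignment, move_by_pieces_ninsns (11, 4) tries SImode first (two moves,
   3 bytes left), then HImode (one move, 1 byte left), then QImode (one
   move), for a total of 4 insns.  This is the count that MOVE_BY_PIECES_P
   compares against MOVE_RATIO.  */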
1510
1511 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1512 with move instructions for mode MODE. GENFUN is the gen_... function
1513 to make a move insn for that mode. DATA has all the other info. */
1514
1515 static void
1516 move_by_pieces_1 (genfun, mode, data)
1517 rtx (*genfun) PROTO ((rtx, ...));
1518 enum machine_mode mode;
1519 struct move_by_pieces *data;
1520 {
1521 register int size = GET_MODE_SIZE (mode);
1522 register rtx to1, from1;
1523
1524 while (data->len >= size)
1525 {
1526 if (data->reverse) data->offset -= size;
1527
1528 to1 = (data->autinc_to
1529 ? gen_rtx_MEM (mode, data->to_addr)
1530 : copy_rtx (change_address (data->to, mode,
1531 plus_constant (data->to_addr,
1532 data->offset))));
1533 MEM_IN_STRUCT_P (to1) = data->to_struct;
1534 RTX_UNCHANGING_P (to1) = data->to_readonly;
1535
1536 from1
1537 = (data->autinc_from
1538 ? gen_rtx_MEM (mode, data->from_addr)
1539 : copy_rtx (change_address (data->from, mode,
1540 plus_constant (data->from_addr,
1541 data->offset))));
1542 MEM_IN_STRUCT_P (from1) = data->from_struct;
1543 RTX_UNCHANGING_P (from1) = data->from_readonly;
1544
1545 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1546 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1547 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1548 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1549
1550 emit_insn ((*genfun) (to1, from1));
1551 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1552 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1553 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1554 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1555
1556 if (! data->reverse) data->offset += size;
1557
1558 data->len -= size;
1559 }
1560 }
1561 \f
1562 /* Emit code to move a block Y to a block X.
1563 This may be done with string-move instructions,
1564 with multiple scalar move instructions, or with a library call.
1565
1566 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1567 with mode BLKmode.
1568 SIZE is an rtx that says how long they are.
1569 ALIGN is the maximum alignment we can assume they have,
1570 measured in bytes.
1571
1572 Return the address of the new block, if memcpy is called and returns it,
1573 0 otherwise. */
1574
1575 rtx
1576 emit_block_move (x, y, size, align)
1577 rtx x, y;
1578 rtx size;
1579 int align;
1580 {
1581 rtx retval = 0;
1582 #ifdef TARGET_MEM_FUNCTIONS
1583 static tree fn;
1584 tree call_expr, arg_list;
1585 #endif
1586
1587 if (GET_MODE (x) != BLKmode)
1588 abort ();
1589
1590 if (GET_MODE (y) != BLKmode)
1591 abort ();
1592
1593 x = protect_from_queue (x, 1);
1594 y = protect_from_queue (y, 0);
1595 size = protect_from_queue (size, 0);
1596
1597 if (GET_CODE (x) != MEM)
1598 abort ();
1599 if (GET_CODE (y) != MEM)
1600 abort ();
1601 if (size == 0)
1602 abort ();
1603
1604 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1605 move_by_pieces (x, y, INTVAL (size), align);
1606 else
1607 {
1608 /* Try the most limited insn first, because there's no point
1609 including more than one in the machine description unless
1610 the more limited one has some advantage. */
1611
1612 rtx opalign = GEN_INT (align);
1613 enum machine_mode mode;
1614
1615 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1616 mode = GET_MODE_WIDER_MODE (mode))
1617 {
1618 enum insn_code code = movstr_optab[(int) mode];
1619 insn_operand_predicate_fn pred;
1620
1621 if (code != CODE_FOR_nothing
1622 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1623 here because if SIZE is less than the mode mask, as it is
1624 returned by the macro, it will definitely be less than the
1625 actual mode mask. */
1626 && ((GET_CODE (size) == CONST_INT
1627 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1628 <= (GET_MODE_MASK (mode) >> 1)))
1629 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1630 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1631 || (*pred) (x, BLKmode))
1632 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1633 || (*pred) (y, BLKmode))
1634 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1635 || (*pred) (opalign, VOIDmode)))
1636 {
1637 rtx op2;
1638 rtx last = get_last_insn ();
1639 rtx pat;
1640
1641 op2 = convert_to_mode (mode, size, 1);
1642 pred = insn_data[(int) code].operand[2].predicate;
1643 if (pred != 0 && ! (*pred) (op2, mode))
1644 op2 = copy_to_mode_reg (mode, op2);
1645
1646 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1647 if (pat)
1648 {
1649 emit_insn (pat);
1650 return 0;
1651 }
1652 else
1653 delete_insns_since (last);
1654 }
1655 }
1656
1657 /* X, Y, or SIZE may have been passed through protect_from_queue.
1658
1659 It is unsafe to save the value generated by protect_from_queue
1660 and reuse it later. Consider what happens if emit_queue is
1661 called before the return value from protect_from_queue is used.
1662
1663 Expansion of the CALL_EXPR below will call emit_queue before
1664 we are finished emitting RTL for argument setup. So if we are
1665 not careful we could get the wrong value for an argument.
1666
1667 To avoid this problem we go ahead and emit code to copy X, Y &
1668 SIZE into new pseudos. We can then place those new pseudos
1669 into an RTL_EXPR and use them later, even after a call to
1670 emit_queue.
1671
1672 Note this is not strictly needed for library calls since they
1673 do not call emit_queue before loading their arguments. However,
1674 we may need to have library calls call emit_queue in the future
1675 since failing to do so could cause problems for targets which
1676 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1677 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1678 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1679
1680 #ifdef TARGET_MEM_FUNCTIONS
1681 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1682 #else
1683 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1684 TREE_UNSIGNED (integer_type_node));
1685 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1686 #endif
1687
1688 #ifdef TARGET_MEM_FUNCTIONS
1689 /* It is incorrect to use the libcall calling conventions to call
1690 memcpy in this context.
1691
1692 This could be a user call to memcpy and the user may wish to
1693 examine the return value from memcpy.
1694
1695 For targets where libcalls and normal calls have different conventions
1696 for returning pointers, we could end up generating incorrect code.
1697
1698 So instead of using a libcall sequence we build up a suitable
1699 CALL_EXPR and expand the call in the normal fashion. */
1700 if (fn == NULL_TREE)
1701 {
1702 tree fntype;
1703
1704 /* This was copied from except.c; I don't know whether all of this is
1705 necessary in this context or not. */
1706 fn = get_identifier ("memcpy");
1707 push_obstacks_nochange ();
1708 end_temporary_allocation ();
1709 fntype = build_pointer_type (void_type_node);
1710 fntype = build_function_type (fntype, NULL_TREE);
1711 fn = build_decl (FUNCTION_DECL, fn, fntype);
1712 ggc_add_tree_root (&fn, 1);
1713 DECL_EXTERNAL (fn) = 1;
1714 TREE_PUBLIC (fn) = 1;
1715 DECL_ARTIFICIAL (fn) = 1;
1716 make_decl_rtl (fn, NULL_PTR, 1);
1717 assemble_external (fn);
1718 pop_obstacks ();
1719 }
1720
1721 /* We need to make an argument list for the function call.
1722
1723 memcpy has three arguments, the first two are void * addresses and
1724 the last is a size_t byte count for the copy. */
1725 arg_list
1726 = build_tree_list (NULL_TREE,
1727 make_tree (build_pointer_type (void_type_node), x));
1728 TREE_CHAIN (arg_list)
1729 = build_tree_list (NULL_TREE,
1730 make_tree (build_pointer_type (void_type_node), y));
1731 TREE_CHAIN (TREE_CHAIN (arg_list))
1732 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1733 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1734
1735 /* Now we have to build up the CALL_EXPR itself. */
1736 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1737 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1738 call_expr, arg_list, NULL_TREE);
1739 TREE_SIDE_EFFECTS (call_expr) = 1;
1740
1741 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1742 #else
1743 emit_library_call (bcopy_libfunc, 0,
1744 VOIDmode, 3, y, Pmode, x, Pmode,
1745 convert_to_mode (TYPE_MODE (integer_type_node), size,
1746 TREE_UNSIGNED (integer_type_node)),
1747 TYPE_MODE (integer_type_node));
1748 #endif
1749 }
1750
1751 return retval;
1752 }
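/* Illustrative usage sketch (editor's addition; TARGET, SOURCE, EXP and
   ALIGN are hypothetical placeholders).  A caller copying a BLKmode
   object, e.g. a structure assignment, does roughly:

       target = ... MEM:BLK for the destination ...;
       source = ... MEM:BLK for the source ...;
       emit_block_move (target, source, expr_size (exp), align);

   Small constant sizes go through move_by_pieces, larger ones through a
   movstrM pattern if the backend provides one, and otherwise through a
   call to memcpy (or bcopy when TARGET_MEM_FUNCTIONS is not defined).  */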
1753 \f
1754 /* Copy all or part of a value X into registers starting at REGNO.
1755 The number of registers to be filled is NREGS. */
1756
1757 void
1758 move_block_to_reg (regno, x, nregs, mode)
1759 int regno;
1760 rtx x;
1761 int nregs;
1762 enum machine_mode mode;
1763 {
1764 int i;
1765 #ifdef HAVE_load_multiple
1766 rtx pat;
1767 rtx last;
1768 #endif
1769
1770 if (nregs == 0)
1771 return;
1772
1773 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1774 x = validize_mem (force_const_mem (mode, x));
1775
1776 /* See if the machine can do this with a load multiple insn. */
1777 #ifdef HAVE_load_multiple
1778 if (HAVE_load_multiple)
1779 {
1780 last = get_last_insn ();
1781 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1782 GEN_INT (nregs));
1783 if (pat)
1784 {
1785 emit_insn (pat);
1786 return;
1787 }
1788 else
1789 delete_insns_since (last);
1790 }
1791 #endif
1792
1793 for (i = 0; i < nregs; i++)
1794 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1795 operand_subword_force (x, i, mode));
1796 }
1797
1798 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1799 The number of registers to be filled is NREGS. SIZE indicates the number
1800 of bytes in the object X. */
1801
1802
1803 void
1804 move_block_from_reg (regno, x, nregs, size)
1805 int regno;
1806 rtx x;
1807 int nregs;
1808 int size;
1809 {
1810 int i;
1811 #ifdef HAVE_store_multiple
1812 rtx pat;
1813 rtx last;
1814 #endif
1815 enum machine_mode mode;
1816
1817 /* If SIZE is that of a mode no bigger than a word, just use that
1818 mode's store operation. */
1819 if (size <= UNITS_PER_WORD
1820 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1821 {
1822 emit_move_insn (change_address (x, mode, NULL),
1823 gen_rtx_REG (mode, regno));
1824 return;
1825 }
1826
1827 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1828 to the left before storing to memory. Note that the previous test
1829 doesn't handle all cases (e.g. SIZE == 3). */
1830 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1831 {
1832 rtx tem = operand_subword (x, 0, 1, BLKmode);
1833 rtx shift;
1834
1835 if (tem == 0)
1836 abort ();
1837
1838 shift = expand_shift (LSHIFT_EXPR, word_mode,
1839 gen_rtx_REG (word_mode, regno),
1840 build_int_2 ((UNITS_PER_WORD - size)
1841 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1842 emit_move_insn (tem, shift);
1843 return;
1844 }
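  /* Worked example (editor's addition): with UNITS_PER_WORD == 4,
     BYTES_BIG_ENDIAN and SIZE == 3, the register value is shifted left by
     (4 - 3) * BITS_PER_UNIT == 8 bits so that the three significant bytes
     land in the low-addressed bytes of the word stored to memory.  */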
1845
1846 /* See if the machine can do this with a store multiple insn. */
1847 #ifdef HAVE_store_multiple
1848 if (HAVE_store_multiple)
1849 {
1850 last = get_last_insn ();
1851 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1852 GEN_INT (nregs));
1853 if (pat)
1854 {
1855 emit_insn (pat);
1856 return;
1857 }
1858 else
1859 delete_insns_since (last);
1860 }
1861 #endif
1862
1863 for (i = 0; i < nregs; i++)
1864 {
1865 rtx tem = operand_subword (x, i, 1, BLKmode);
1866
1867 if (tem == 0)
1868 abort ();
1869
1870 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1871 }
1872 }
1873
1874 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1875 registers represented by a PARALLEL. SSIZE represents the total size of
1876 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1877 SRC in bits. */
1878 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1879 the balance will be in what would be the low-order memory addresses, i.e.
1880 left justified for big endian, right justified for little endian. This
1881 happens to be true for the targets currently using this support. If this
1882 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1883 would be needed. */
1884
1885 void
1886 emit_group_load (dst, orig_src, ssize, align)
1887 rtx dst, orig_src;
1888 int align, ssize;
1889 {
1890 rtx *tmps, src;
1891 int start, i;
1892
1893 if (GET_CODE (dst) != PARALLEL)
1894 abort ();
1895
1896 /* Check for a NULL entry, used to indicate that the parameter goes
1897 both on the stack and in registers. */
1898 if (XEXP (XVECEXP (dst, 0, 0), 0))
1899 start = 0;
1900 else
1901 start = 1;
1902
1903 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1904
1905 /* If we won't be loading directly from memory, protect the real source
1906 from strange tricks we might play. */
1907 src = orig_src;
1908 if (GET_CODE (src) != MEM)
1909 {
1910 if (GET_MODE (src) == VOIDmode)
1911 src = gen_reg_rtx (GET_MODE (dst));
1912 else
1913 src = gen_reg_rtx (GET_MODE (orig_src));
1914 emit_move_insn (src, orig_src);
1915 }
1916
1917 /* Process the pieces. */
1918 for (i = start; i < XVECLEN (dst, 0); i++)
1919 {
1920 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1921 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1922 int bytelen = GET_MODE_SIZE (mode);
1923 int shift = 0;
1924
1925 /* Handle trailing fragments that run over the size of the struct. */
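	 /* For example, if SSIZE is 6, BYTEPOS is 4 and this piece's mode is
	    4 bytes wide, only 2 bytes of the struct remain: SHIFT becomes
	    (4 - 2) * BITS_PER_UNIT == 16 and BYTELEN is trimmed to 2, and on
	    a big-endian target the extracted value is shifted left by SHIFT
	    below so that the fragment ends up left justified.  */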
1926 if (ssize >= 0 && bytepos + bytelen > ssize)
1927 {
1928 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1929 bytelen = ssize - bytepos;
1930 if (bytelen <= 0)
1931 abort();
1932 }
1933
1934 /* Optimize the access just a bit. */
1935 if (GET_CODE (src) == MEM
1936 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1937 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1938 && bytelen == GET_MODE_SIZE (mode))
1939 {
1940 tmps[i] = gen_reg_rtx (mode);
1941 emit_move_insn (tmps[i],
1942 change_address (src, mode,
1943 plus_constant (XEXP (src, 0),
1944 bytepos)));
1945 }
1946 else if (GET_CODE (src) == CONCAT)
1947 {
1948 if (bytepos == 0
1949 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1950 tmps[i] = XEXP (src, 0);
1951 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1952 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1953 tmps[i] = XEXP (src, 1);
1954 else
1955 abort ();
1956 }
1957 else
1958 {
1959 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1960 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1961 mode, mode, align, ssize);
1962 }
1963
1964 if (BYTES_BIG_ENDIAN && shift)
1965 {
1966 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1967 tmps[i], 0, OPTAB_WIDEN);
1968 }
1969 }
1970 emit_queue();
1971
1972 /* Copy the extracted pieces into the proper (probable) hard regs. */
1973 for (i = start; i < XVECLEN (dst, 0); i++)
1974 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1975 }
1976
1977 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1978 registers represented by a PARALLEL. SSIZE represents the total size of
1979 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1980
1981 void
1982 emit_group_store (orig_dst, src, ssize, align)
1983 rtx orig_dst, src;
1984 int ssize, align;
1985 {
1986 rtx *tmps, dst;
1987 int start, i;
1988
1989 if (GET_CODE (src) != PARALLEL)
1990 abort ();
1991
1992 /* Check for a NULL entry, used to indicate that the parameter goes
1993 both on the stack and in registers. */
1994 if (XEXP (XVECEXP (src, 0, 0), 0))
1995 start = 0;
1996 else
1997 start = 1;
1998
1999 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2000
2001 /* Copy the (probable) hard regs into pseudos. */
2002 for (i = start; i < XVECLEN (src, 0); i++)
2003 {
2004 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2005 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2006 emit_move_insn (tmps[i], reg);
2007 }
2008 emit_queue();
2009
2010 /* If we won't be storing directly into memory, protect the real destination
2011 from strange tricks we might play. */
2012 dst = orig_dst;
2013 if (GET_CODE (dst) == PARALLEL)
2014 {
2015 rtx temp;
2016
2017 /* We can get a PARALLEL dst if there is a conditional expression in
2018 a return statement. In that case, the dst and src are the same,
2019 so no action is necessary. */
2020 if (rtx_equal_p (dst, src))
2021 return;
2022
2023 /* It is unclear if we can ever reach here, but we may as well handle
2024 it. Allocate a temporary, and split this into a store/load to/from
2025 the temporary. */
2026
2027 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2028 emit_group_store (temp, src, ssize, align);
2029 emit_group_load (dst, temp, ssize, align);
2030 return;
2031 }
2032 else if (GET_CODE (dst) != MEM)
2033 {
2034 dst = gen_reg_rtx (GET_MODE (orig_dst));
2035 /* Make life a bit easier for combine. */
2036 emit_move_insn (dst, const0_rtx);
2037 }
2038 else if (! MEM_IN_STRUCT_P (dst))
2039 {
2040 /* store_bit_field requires that memory operations have
2041 mem_in_struct_p set; we might not. */
2042
2043 dst = copy_rtx (orig_dst);
2044 MEM_SET_IN_STRUCT_P (dst, 1);
2045 }
2046
2047 /* Process the pieces. */
2048 for (i = start; i < XVECLEN (src, 0); i++)
2049 {
2050 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2051 enum machine_mode mode = GET_MODE (tmps[i]);
2052 int bytelen = GET_MODE_SIZE (mode);
2053
2054 /* Handle trailing fragments that run over the size of the struct. */
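	 /* This is the mirror image of the emit_group_load case above: the
	    fragment was left justified in its register, so on a big-endian
	    target shift it back to the right before storing only the
	    low-order BYTELEN bytes.  */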
2055 if (ssize >= 0 && bytepos + bytelen > ssize)
2056 {
2057 if (BYTES_BIG_ENDIAN)
2058 {
2059 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2060 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2061 tmps[i], 0, OPTAB_WIDEN);
2062 }
2063 bytelen = ssize - bytepos;
2064 }
2065
2066 /* Optimize the access just a bit. */
2067 if (GET_CODE (dst) == MEM
2068 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2069 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2070 && bytelen == GET_MODE_SIZE (mode))
2071 {
2072 emit_move_insn (change_address (dst, mode,
2073 plus_constant (XEXP (dst, 0),
2074 bytepos)),
2075 tmps[i]);
2076 }
2077 else
2078 {
2079 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2080 mode, tmps[i], align, ssize);
2081 }
2082 }
2083 emit_queue();
2084
2085 /* Copy from the pseudo into the (probable) hard reg. */
2086 if (GET_CODE (dst) == REG)
2087 emit_move_insn (orig_dst, dst);
2088 }
2089
2090 /* Generate code to copy a BLKmode object of TYPE out of a
2091 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2092 is null, a stack temporary is created. TGTBLK is returned.
2093
2094 The primary purpose of this routine is to handle functions
2095 that return BLKmode structures in registers. Some machines
2096 (the PA for example) want to return all small structures
2097 in registers regardless of the structure's alignment.
2098 */
2099
2100 rtx
2101 copy_blkmode_from_reg (tgtblk, srcreg, type)
2102 rtx tgtblk;
2103 rtx srcreg;
2104 tree type;
2105 {
2106 int bytes = int_size_in_bytes (type);
2107 rtx src = NULL, dst = NULL;
2108 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2109 int bitpos, xbitpos, big_endian_correction = 0;
2110
2111 if (tgtblk == 0)
2112 {
2113 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2114 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2115 preserve_temp_slots (tgtblk);
2116 }
2117
2118 /* This code assumes srcreg is at least a full word. If it isn't,
2119 copy it into a new pseudo which is a full word. */
2120 if (GET_MODE (srcreg) != BLKmode
2121 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2122 srcreg = convert_to_mode (word_mode, srcreg,
2123 TREE_UNSIGNED (type));
2124
2125 /* Structures whose size is not a multiple of a word are aligned
2126 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2127 machine, this means we must skip the empty high order bytes when
2128 calculating the bit offset. */
2129 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2130 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2131 * BITS_PER_UNIT));
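  /* For example, with BITS_PER_WORD == 32 and a 6-byte structure,
     bytes % UNITS_PER_WORD == 2, so the correction is 32 - 16 == 16 bits;
     the first extraction then starts 16 bits into the source word,
     skipping the unused high-order bytes.  */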
2132
2133 /* Copy the structure BITSIZE bits at a time.
2134
2135 We could probably emit more efficient code for machines
2136 which do not use strict alignment, but it doesn't seem
2137 worth the effort at the current time. */
2138 for (bitpos = 0, xbitpos = big_endian_correction;
2139 bitpos < bytes * BITS_PER_UNIT;
2140 bitpos += bitsize, xbitpos += bitsize)
2141 {
2142
2143 /* We need a new source operand each time xbitpos is on a
2144 word boundary or when xbitpos == big_endian_correction
2145 (the first time through). */
2146 if (xbitpos % BITS_PER_WORD == 0
2147 || xbitpos == big_endian_correction)
2148 src = operand_subword_force (srcreg,
2149 xbitpos / BITS_PER_WORD,
2150 BLKmode);
2151
2152 /* We need a new destination operand each time bitpos is on
2153 a word boundary. */
2154 if (bitpos % BITS_PER_WORD == 0)
2155 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2156
2157 /* Use xbitpos for the source extraction (right justified) and
2158 bitpos for the destination store (left justified). */
2159 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2160 extract_bit_field (src, bitsize,
2161 xbitpos % BITS_PER_WORD, 1,
2162 NULL_RTX, word_mode,
2163 word_mode,
2164 bitsize / BITS_PER_UNIT,
2165 BITS_PER_WORD),
2166 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2167 }
2168 return tgtblk;
2169 }
2170
2171
2172 /* Add a USE expression for REG to the (possibly empty) list pointed
2173 to by CALL_FUSAGE. REG must denote a hard register. */
2174
2175 void
2176 use_reg (call_fusage, reg)
2177 rtx *call_fusage, reg;
2178 {
2179 if (GET_CODE (reg) != REG
2180 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2181 abort();
2182
2183 *call_fusage
2184 = gen_rtx_EXPR_LIST (VOIDmode,
2185 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2186 }
2187
2188 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2189 starting at REGNO. All of these registers must be hard registers. */
2190
2191 void
2192 use_regs (call_fusage, regno, nregs)
2193 rtx *call_fusage;
2194 int regno;
2195 int nregs;
2196 {
2197 int i;
2198
2199 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2200 abort ();
2201
2202 for (i = 0; i < nregs; i++)
2203 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2204 }
2205
2206 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2207 PARALLEL REGS. This is for calls that pass values in multiple
2208 non-contiguous locations. The Irix 6 ABI has examples of this. */
2209
2210 void
2211 use_group_regs (call_fusage, regs)
2212 rtx *call_fusage;
2213 rtx regs;
2214 {
2215 int i;
2216
2217 for (i = 0; i < XVECLEN (regs, 0); i++)
2218 {
2219 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2220
2221 /* A NULL entry means the parameter goes both on the stack and in
2222 registers. This can also be a MEM for targets that pass values
2223 partially on the stack and partially in registers. */
2224 if (reg != 0 && GET_CODE (reg) == REG)
2225 use_reg (call_fusage, reg);
2226 }
2227 }
2228 \f
2229 /* Generate several move instructions to clear LEN bytes of block TO.
2230 (A MEM rtx with BLKmode). The caller must pass TO through
2231 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2232 we can assume. */
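/* This mirrors move_by_pieces, except that there is no source block:
   clear_by_pieces_1 below stores const0_rtx through each piece.  */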
2233
2234 static void
2235 clear_by_pieces (to, len, align)
2236 rtx to;
2237 int len, align;
2238 {
2239 struct clear_by_pieces data;
2240 rtx to_addr = XEXP (to, 0);
2241 int max_size = MOVE_MAX_PIECES + 1;
2242 enum machine_mode mode = VOIDmode, tmode;
2243 enum insn_code icode;
2244
2245 data.offset = 0;
2246 data.to_addr = to_addr;
2247 data.to = to;
2248 data.autinc_to
2249 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2250 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2251
2252 data.explicit_inc_to = 0;
2253 data.reverse
2254 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2255 if (data.reverse) data.offset = len;
2256 data.len = len;
2257
2258 data.to_struct = MEM_IN_STRUCT_P (to);
2259
2260 /* If copying requires more than two move insns,
2261 copy addresses to registers (to make displacements shorter)
2262 and use post-increment if available. */
2263 if (!data.autinc_to
2264 && move_by_pieces_ninsns (len, align) > 2)
2265 {
2266 /* Determine the main mode we'll be using. */
2267 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2268 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2269 if (GET_MODE_SIZE (tmode) < max_size)
2270 mode = tmode;
2271
2272 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2273 {
2274 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2275 data.autinc_to = 1;
2276 data.explicit_inc_to = -1;
2277 }
2278 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2279 {
2280 data.to_addr = copy_addr_to_reg (to_addr);
2281 data.autinc_to = 1;
2282 data.explicit_inc_to = 1;
2283 }
2284 if (!data.autinc_to && CONSTANT_P (to_addr))
2285 data.to_addr = copy_addr_to_reg (to_addr);
2286 }
2287
2288 if (! SLOW_UNALIGNED_ACCESS
2289 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2290 align = MOVE_MAX;
2291
2292 /* First move what we can in the largest integer mode, then go to
2293 successively smaller modes. */
2294
2295 while (max_size > 1)
2296 {
2297 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2298 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2299 if (GET_MODE_SIZE (tmode) < max_size)
2300 mode = tmode;
2301
2302 if (mode == VOIDmode)
2303 break;
2304
2305 icode = mov_optab->handlers[(int) mode].insn_code;
2306 if (icode != CODE_FOR_nothing
2307 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2308 GET_MODE_SIZE (mode)))
2309 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2310
2311 max_size = GET_MODE_SIZE (mode);
2312 }
2313
2314 /* The code above should have handled everything. */
2315 if (data.len != 0)
2316 abort ();
2317 }
2318
2319 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2320 with move instructions for mode MODE. GENFUN is the gen_... function
2321 to make a move insn for that mode. DATA has all the other info. */
2322
2323 static void
2324 clear_by_pieces_1 (genfun, mode, data)
2325 rtx (*genfun) PROTO ((rtx, ...));
2326 enum machine_mode mode;
2327 struct clear_by_pieces *data;
2328 {
2329 register int size = GET_MODE_SIZE (mode);
2330 register rtx to1;
2331
2332 while (data->len >= size)
2333 {
2334 if (data->reverse) data->offset -= size;
2335
2336 to1 = (data->autinc_to
2337 ? gen_rtx_MEM (mode, data->to_addr)
2338 : copy_rtx (change_address (data->to, mode,
2339 plus_constant (data->to_addr,
2340 data->offset))));
2341 MEM_IN_STRUCT_P (to1) = data->to_struct;
2342
2343 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2344 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2345
2346 emit_insn ((*genfun) (to1, const0_rtx));
2347 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2348 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2349
2350 if (! data->reverse) data->offset += size;
2351
2352 data->len -= size;
2353 }
2354 }
2355 \f
2356 /* Write zeros through the storage of OBJECT.
2357 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2358 the maximum alignment we can assume it has, measured in bytes.
2359
2360 If we call a function that returns the length of the block, return it. */
2361
2362 rtx
2363 clear_storage (object, size, align)
2364 rtx object;
2365 rtx size;
2366 int align;
2367 {
2368 #ifdef TARGET_MEM_FUNCTIONS
2369 static tree fn;
2370 tree call_expr, arg_list;
2371 #endif
2372 rtx retval = 0;
2373
2374 if (GET_MODE (object) == BLKmode)
2375 {
2376 object = protect_from_queue (object, 1);
2377 size = protect_from_queue (size, 0);
2378
2379 if (GET_CODE (size) == CONST_INT
2380 && MOVE_BY_PIECES_P (INTVAL (size), align))
2381 clear_by_pieces (object, INTVAL (size), align);
2382
2383 else
2384 {
2385 /* Try the most limited insn first, because there's no point
2386 including more than one in the machine description unless
2387 the more limited one has some advantage. */
2388
2389 rtx opalign = GEN_INT (align);
2390 enum machine_mode mode;
2391
2392 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2393 mode = GET_MODE_WIDER_MODE (mode))
2394 {
2395 enum insn_code code = clrstr_optab[(int) mode];
2396 insn_operand_predicate_fn pred;
2397
2398 if (code != CODE_FOR_nothing
2399 /* We don't need MODE to be narrower than
2400 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2401 the mode mask, as it is returned by the macro, it will
2402 definitely be less than the actual mode mask. */
2403 && ((GET_CODE (size) == CONST_INT
2404 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2405 <= (GET_MODE_MASK (mode) >> 1)))
2406 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2407 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2408 || (*pred) (object, BLKmode))
2409 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2410 || (*pred) (opalign, VOIDmode)))
2411 {
2412 rtx op1;
2413 rtx last = get_last_insn ();
2414 rtx pat;
2415
2416 op1 = convert_to_mode (mode, size, 1);
2417 pred = insn_data[(int) code].operand[1].predicate;
2418 if (pred != 0 && ! (*pred) (op1, mode))
2419 op1 = copy_to_mode_reg (mode, op1);
2420
2421 pat = GEN_FCN ((int) code) (object, op1, opalign);
2422 if (pat)
2423 {
2424 emit_insn (pat);
2425 return 0;
2426 }
2427 else
2428 delete_insns_since (last);
2429 }
2430 }
2431
2432 /* OBJECT or SIZE may have been passed through protect_from_queue.
2433
2434 It is unsafe to save the value generated by protect_from_queue
2435 and reuse it later. Consider what happens if emit_queue is
2436 called before the return value from protect_from_queue is used.
2437
2438 Expansion of the CALL_EXPR below will call emit_queue before
2439 we are finished emitting RTL for argument setup. So if we are
2440 not careful we could get the wrong value for an argument.
2441
2442 To avoid this problem we go ahead and emit code to copy OBJECT
2443 and SIZE into new pseudos. We can then place those new pseudos
2444 into an RTL_EXPR and use them later, even after a call to
2445 emit_queue.
2446
2447 Note this is not strictly needed for library calls since they
2448 do not call emit_queue before loading their arguments. However,
2449 we may need to have library calls call emit_queue in the future
2450 since failing to do so could cause problems for targets which
2451 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2452 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2453
2454 #ifdef TARGET_MEM_FUNCTIONS
2455 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2456 #else
2457 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2458 TREE_UNSIGNED (integer_type_node));
2459 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2460 #endif
2461
2462
2463 #ifdef TARGET_MEM_FUNCTIONS
2464 /* It is incorrect to use the libcall calling conventions to call
2465 memset in this context.
2466
2467 This could be a user call to memset and the user may wish to
2468 examine the return value from memset.
2469
2470 For targets where libcalls and normal calls have different
2471 conventions for returning pointers, we could end up generating
2472 incorrect code.
2473
2474 So instead of using a libcall sequence we build up a suitable
2475 CALL_EXPR and expand the call in the normal fashion. */
2476 if (fn == NULL_TREE)
2477 {
2478 tree fntype;
2479
2480 /* This was copied from except.c; I don't know whether all of this
2481 is necessary in this context or not. */
2482 fn = get_identifier ("memset");
2483 push_obstacks_nochange ();
2484 end_temporary_allocation ();
2485 fntype = build_pointer_type (void_type_node);
2486 fntype = build_function_type (fntype, NULL_TREE);
2487 fn = build_decl (FUNCTION_DECL, fn, fntype);
2488 ggc_add_tree_root (&fn, 1);
2489 DECL_EXTERNAL (fn) = 1;
2490 TREE_PUBLIC (fn) = 1;
2491 DECL_ARTIFICIAL (fn) = 1;
2492 make_decl_rtl (fn, NULL_PTR, 1);
2493 assemble_external (fn);
2494 pop_obstacks ();
2495 }
2496
2497 /* We need to make an argument list for the function call.
2498
2499 memset has three arguments: the first is a void * address, the
2500 second an integer with the initialization value, and the last a
2501 size_t byte count for the copy. */
2502 arg_list
2503 = build_tree_list (NULL_TREE,
2504 make_tree (build_pointer_type (void_type_node),
2505 object));
2506 TREE_CHAIN (arg_list)
2507 = build_tree_list (NULL_TREE,
2508 make_tree (integer_type_node, const0_rtx));
2509 TREE_CHAIN (TREE_CHAIN (arg_list))
2510 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2511 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2512
2513 /* Now we have to build up the CALL_EXPR itself. */
2514 call_expr = build1 (ADDR_EXPR,
2515 build_pointer_type (TREE_TYPE (fn)), fn);
2516 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2517 call_expr, arg_list, NULL_TREE);
2518 TREE_SIDE_EFFECTS (call_expr) = 1;
2519
2520 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2521 #else
2522 emit_library_call (bzero_libfunc, 0,
2523 VOIDmode, 2, object, Pmode, size,
2524 TYPE_MODE (integer_type_node));
2525 #endif
2526 }
2527 }
2528 else
2529 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2530
2531 return retval;
2532 }
2533
2534 /* Generate code to copy Y into X.
2535 Both Y and X must have the same mode, except that
2536 Y can be a constant with VOIDmode.
2537 This mode cannot be BLKmode; use emit_block_move for that.
2538
2539 Return the last instruction emitted. */
2540
2541 rtx
2542 emit_move_insn (x, y)
2543 rtx x, y;
2544 {
2545 enum machine_mode mode = GET_MODE (x);
2546
2547 x = protect_from_queue (x, 1);
2548 y = protect_from_queue (y, 0);
2549
2550 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2551 abort ();
2552
2553 /* Never force constant_p_rtx to memory. */
2554 if (GET_CODE (y) == CONSTANT_P_RTX)
2555 ;
2556 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2557 y = force_const_mem (mode, y);
2558
2559 /* If X or Y are memory references, verify that their addresses are valid
2560 for the machine. */
2561 if (GET_CODE (x) == MEM
2562 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2563 && ! push_operand (x, GET_MODE (x)))
2564 || (flag_force_addr
2565 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2566 x = change_address (x, VOIDmode, XEXP (x, 0));
2567
2568 if (GET_CODE (y) == MEM
2569 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2570 || (flag_force_addr
2571 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2572 y = change_address (y, VOIDmode, XEXP (y, 0));
2573
2574 if (mode == BLKmode)
2575 abort ();
2576
2577 return emit_move_insn_1 (x, y);
2578 }
2579
2580 /* Low level part of emit_move_insn.
2581 Called just like emit_move_insn, but assumes X and Y
2582 are basically valid. */
2583
2584 rtx
2585 emit_move_insn_1 (x, y)
2586 rtx x, y;
2587 {
2588 enum machine_mode mode = GET_MODE (x);
2589 enum machine_mode submode;
2590 enum mode_class class = GET_MODE_CLASS (mode);
2591 int i;
2592
2593 if (mode >= MAX_MACHINE_MODE)
2594 abort ();
2595
2596 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2597 return
2598 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2599
2600 /* Expand complex moves by moving real part and imag part, if possible. */
2601 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2602 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2603 * BITS_PER_UNIT),
2604 (class == MODE_COMPLEX_INT
2605 ? MODE_INT : MODE_FLOAT),
2606 0))
2607 && (mov_optab->handlers[(int) submode].insn_code
2608 != CODE_FOR_nothing))
2609 {
2610 /* Don't split destination if it is a stack push. */
2611 int stack = push_operand (x, GET_MODE (x));
2612
2613 /* If this is a stack push, push the highpart first, so it
2614 will be in the argument order.
2615
2616 In that case, change_address is used only to convert
2617 the mode, not to change the address. */
2618 if (stack)
2619 {
2620 /* Note that the real part always precedes the imag part in memory
2621 regardless of machine's endianness. */
2622 #ifdef STACK_GROWS_DOWNWARD
2623 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2624 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2625 gen_imagpart (submode, y)));
2626 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2627 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2628 gen_realpart (submode, y)));
2629 #else
2630 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2631 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2632 gen_realpart (submode, y)));
2633 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2634 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2635 gen_imagpart (submode, y)));
2636 #endif
2637 }
2638 else
2639 {
2640 rtx realpart_x, realpart_y;
2641 rtx imagpart_x, imagpart_y;
2642
2643 /* If this is a complex value with each part being smaller than a
2644 word, the usual calling sequence will likely pack the pieces into
2645 a single register. Unfortunately, SUBREG of hard registers only
2646 deals in terms of words, so we have a problem converting input
2647 arguments to the CONCAT of two registers that is used elsewhere
2648 for complex values. If this is before reload, we can copy it into
2649 memory and reload. FIXME, we should see about using extract and
2650 insert on integer registers, but complex short and complex char
2651 variables should be rarely used. */
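	  /* For example, a __complex__ char value is two QImode halves that
	     the calling sequence may pack into a single hard register; since
	     SUBREGs of hard registers work only in whole words, we bounce the
	     value through a stack temporary in an integer mode of the same
	     total size (reg_mode) instead.  */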
2652 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2653 && (reload_in_progress | reload_completed) == 0)
2654 {
2655 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2656 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2657
2658 if (packed_dest_p || packed_src_p)
2659 {
2660 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2661 ? MODE_FLOAT : MODE_INT);
2662
2663 enum machine_mode reg_mode =
2664 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2665
2666 if (reg_mode != BLKmode)
2667 {
2668 rtx mem = assign_stack_temp (reg_mode,
2669 GET_MODE_SIZE (mode), 0);
2670
2671 rtx cmem = change_address (mem, mode, NULL_RTX);
2672
2673 cfun->cannot_inline = "function uses short complex types";
2674
2675 if (packed_dest_p)
2676 {
2677 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2678 emit_move_insn_1 (cmem, y);
2679 return emit_move_insn_1 (sreg, mem);
2680 }
2681 else
2682 {
2683 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2684 emit_move_insn_1 (mem, sreg);
2685 return emit_move_insn_1 (x, cmem);
2686 }
2687 }
2688 }
2689 }
2690
2691 realpart_x = gen_realpart (submode, x);
2692 realpart_y = gen_realpart (submode, y);
2693 imagpart_x = gen_imagpart (submode, x);
2694 imagpart_y = gen_imagpart (submode, y);
2695
2696 /* Show the output dies here. This is necessary for SUBREGs
2697 of pseudos since we cannot track their lifetimes correctly;
2698 hard regs shouldn't appear here except as return values.
2699 We never want to emit such a clobber after reload. */
2700 if (x != y
2701 && ! (reload_in_progress || reload_completed)
2702 && (GET_CODE (realpart_x) == SUBREG
2703 || GET_CODE (imagpart_x) == SUBREG))
2704 {
2705 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2706 }
2707
2708 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2709 (realpart_x, realpart_y));
2710 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2711 (imagpart_x, imagpart_y));
2712 }
2713
2714 return get_last_insn ();
2715 }
2716
2717 /* This will handle any multi-word mode that lacks a move_insn pattern.
2718 However, you will get better code if you define such patterns,
2719 even if they must turn into multiple assembler instructions. */
2720 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2721 {
2722 rtx last_insn = 0;
2723 rtx seq;
2724 int need_clobber;
2725
2726 #ifdef PUSH_ROUNDING
2727
2728 /* If X is a push on the stack, do the push now and replace
2729 X with a reference to the stack pointer. */
2730 if (push_operand (x, GET_MODE (x)))
2731 {
2732 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2733 x = change_address (x, VOIDmode, stack_pointer_rtx);
2734 }
2735 #endif
2736
2737 start_sequence ();
2738
2739 need_clobber = 0;
2740 for (i = 0;
2741 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2742 i++)
2743 {
2744 rtx xpart = operand_subword (x, i, 1, mode);
2745 rtx ypart = operand_subword (y, i, 1, mode);
2746
2747 /* If we can't get a part of Y, put Y into memory if it is a
2748 constant. Otherwise, force it into a register. If we still
2749 can't get a part of Y, abort. */
2750 if (ypart == 0 && CONSTANT_P (y))
2751 {
2752 y = force_const_mem (mode, y);
2753 ypart = operand_subword (y, i, 1, mode);
2754 }
2755 else if (ypart == 0)
2756 ypart = operand_subword_force (y, i, mode);
2757
2758 if (xpart == 0 || ypart == 0)
2759 abort ();
2760
2761 need_clobber |= (GET_CODE (xpart) == SUBREG);
2762
2763 last_insn = emit_move_insn (xpart, ypart);
2764 }
2765
2766 seq = gen_sequence ();
2767 end_sequence ();
2768
2769 /* Show the output dies here. This is necessary for SUBREGs
2770 of pseudos since we cannot track their lifetimes correctly;
2771 hard regs shouldn't appear here except as return values.
2772 We never want to emit such a clobber after reload. */
2773 if (x != y
2774 && ! (reload_in_progress || reload_completed)
2775 && need_clobber != 0)
2776 {
2777 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2778 }
2779
2780 emit_insn (seq);
2781
2782 return last_insn;
2783 }
2784 else
2785 abort ();
2786 }
2787 \f
2788 /* Pushing data onto the stack. */
2789
2790 /* Push a block of length SIZE (perhaps variable)
2791 and return an rtx to address the beginning of the block.
2792 Note that it is not possible for the value returned to be a QUEUED.
2793 The value may be virtual_outgoing_args_rtx.
2794
2795 EXTRA is the number of bytes of padding to push in addition to SIZE.
2796 BELOW nonzero means this padding comes at low addresses;
2797 otherwise, the padding comes at high addresses. */
2798
2799 rtx
2800 push_block (size, extra, below)
2801 rtx size;
2802 int extra, below;
2803 {
2804 register rtx temp;
2805
2806 size = convert_modes (Pmode, ptr_mode, size, 1);
2807 if (CONSTANT_P (size))
2808 anti_adjust_stack (plus_constant (size, extra));
2809 else if (GET_CODE (size) == REG && extra == 0)
2810 anti_adjust_stack (size);
2811 else
2812 {
2813 rtx temp = copy_to_mode_reg (Pmode, size);
2814 if (extra != 0)
2815 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2816 temp, 0, OPTAB_LIB_WIDEN);
2817 anti_adjust_stack (temp);
2818 }
2819
2820 #if defined (STACK_GROWS_DOWNWARD) \
2821 || (defined (ARGS_GROW_DOWNWARD) \
2822 && !defined (ACCUMULATE_OUTGOING_ARGS))
2823
2824 /* Return the lowest stack address when STACK or ARGS grow downward and
2825 we are not accumulating outgoing arguments (the c4x port uses such
2826 conventions). */
2827 temp = virtual_outgoing_args_rtx;
2828 if (extra != 0 && below)
2829 temp = plus_constant (temp, extra);
2830 #else
2831 if (GET_CODE (size) == CONST_INT)
2832 temp = plus_constant (virtual_outgoing_args_rtx,
2833 - INTVAL (size) - (below ? 0 : extra));
2834 else if (extra != 0 && !below)
2835 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2836 negate_rtx (Pmode, plus_constant (size, extra)));
2837 else
2838 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2839 negate_rtx (Pmode, size));
2840 #endif
2841
2842 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2843 }
2844
2845 rtx
2846 gen_push_operand ()
2847 {
2848 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2849 }
2850
2851 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2852 block of SIZE bytes. */
2853
2854 static rtx
2855 get_push_address (size)
2856 int size;
2857 {
2858 register rtx temp;
2859
2860 if (STACK_PUSH_CODE == POST_DEC)
2861 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2862 else if (STACK_PUSH_CODE == POST_INC)
2863 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2864 else
2865 temp = stack_pointer_rtx;
2866
2867 return copy_to_reg (temp);
2868 }
2869
2870 /* Generate code to push X onto the stack, assuming it has mode MODE and
2871 type TYPE.
2872 MODE is redundant except when X is a CONST_INT (since they don't
2873 carry mode info).
2874 SIZE is an rtx for the size of data to be copied (in bytes),
2875 needed only if X is BLKmode.
2876
2877 ALIGN (in bytes) is maximum alignment we can assume.
2878
2879 If PARTIAL and REG are both nonzero, then copy that many of the first
2880 words of X into registers starting with REG, and push the rest of X.
2881 The amount of space pushed is decreased by PARTIAL words,
2882 rounded *down* to a multiple of PARM_BOUNDARY.
2883 REG must be a hard register in this case.
2884 If REG is zero but PARTIAL is not, take all other actions for an
2885 argument partially in registers, but do not actually load any
2886 registers.
2887
2888 EXTRA is the amount in bytes of extra space to leave next to this arg.
2889 This is ignored if an argument block has already been allocated.
2890
2891 On a machine that lacks real push insns, ARGS_ADDR is the address of
2892 the bottom of the argument block for this call. We use indexing off there
2893 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2894 argument block has not been preallocated.
2895
2896 ARGS_SO_FAR is the size of args previously pushed for this call.
2897
2898 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2899 for arguments passed in registers. If nonzero, it will be the number
2900 of bytes required. */
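/* For instance, with UNITS_PER_WORD == 4 and PARTIAL == 2, the first
   8 bytes of X end up in REG and the following register, and (modulo the
   PARM_BOUNDARY rounding described above) only the remainder of X is
   stored on the stack.  */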
2901
2902 void
2903 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2904 args_addr, args_so_far, reg_parm_stack_space,
2905 alignment_pad)
2906 register rtx x;
2907 enum machine_mode mode;
2908 tree type;
2909 rtx size;
2910 int align;
2911 int partial;
2912 rtx reg;
2913 int extra;
2914 rtx args_addr;
2915 rtx args_so_far;
2916 int reg_parm_stack_space;
2917 rtx alignment_pad;
2918 {
2919 rtx xinner;
2920 enum direction stack_direction
2921 #ifdef STACK_GROWS_DOWNWARD
2922 = downward;
2923 #else
2924 = upward;
2925 #endif
2926
2927 /* Decide where to pad the argument: `downward' for below,
2928 `upward' for above, or `none' for don't pad it.
2929 Default is below for small data on big-endian machines; else above. */
2930 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2931
2932 /* Invert direction if stack is post-update. */
2933 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2934 if (where_pad != none)
2935 where_pad = (where_pad == downward ? upward : downward);
2936
2937 xinner = x = protect_from_queue (x, 0);
2938
2939 if (mode == BLKmode)
2940 {
2941 /* Copy a block into the stack, entirely or partially. */
2942
2943 register rtx temp;
2944 int used = partial * UNITS_PER_WORD;
2945 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2946 int skip;
2947
2948 if (size == 0)
2949 abort ();
2950
2951 used -= offset;
2952
2953 /* USED is now the # of bytes we need not copy to the stack
2954 because registers will take care of them. */
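      /* For example, with PARTIAL == 3, UNITS_PER_WORD == 4 and
	 PARM_BOUNDARY == 64, USED starts at 12; OFFSET is 12 % 8 == 4, so
	 USED is rounded down to 8 and the odd word beyond the boundary is
	 copied to the stack as well, keeping the stack portion aligned to
	 PARM_BOUNDARY.  */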
2955
2956 if (partial != 0)
2957 xinner = change_address (xinner, BLKmode,
2958 plus_constant (XEXP (xinner, 0), used));
2959
2960 /* If the partial register-part of the arg counts in its stack size,
2961 skip the part of stack space corresponding to the registers.
2962 Otherwise, start copying to the beginning of the stack space,
2963 by setting SKIP to 0. */
2964 skip = (reg_parm_stack_space == 0) ? 0 : used;
2965
2966 #ifdef PUSH_ROUNDING
2967 /* Do it with several push insns if that doesn't take lots of insns
2968 and if there is no difficulty with push insns that skip bytes
2969 on the stack for alignment purposes. */
2970 if (args_addr == 0
2971 && GET_CODE (size) == CONST_INT
2972 && skip == 0
2973 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2974 /* Here we avoid the case of a structure whose weak alignment
2975 forces many pushes of a small amount of data,
2976 and such small pushes do rounding that causes trouble. */
2977 && ((! SLOW_UNALIGNED_ACCESS)
2978 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2979 || PUSH_ROUNDING (align) == align)
2980 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2981 {
2982 /* Push padding now if padding above and stack grows down,
2983 or if padding below and stack grows up.
2984 But if space already allocated, this has already been done. */
2985 if (extra && args_addr == 0
2986 && where_pad != none && where_pad != stack_direction)
2987 anti_adjust_stack (GEN_INT (extra));
2988
2989 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2990 INTVAL (size) - used, align);
2991
2992 if (current_function_check_memory_usage && ! in_check_memory_usage)
2993 {
2994 rtx temp;
2995
2996 in_check_memory_usage = 1;
2997 temp = get_push_address (INTVAL(size) - used);
2998 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2999 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3000 temp, Pmode,
3001 XEXP (xinner, 0), Pmode,
3002 GEN_INT (INTVAL(size) - used),
3003 TYPE_MODE (sizetype));
3004 else
3005 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3006 temp, Pmode,
3007 GEN_INT (INTVAL(size) - used),
3008 TYPE_MODE (sizetype),
3009 GEN_INT (MEMORY_USE_RW),
3010 TYPE_MODE (integer_type_node));
3011 in_check_memory_usage = 0;
3012 }
3013 }
3014 else
3015 #endif /* PUSH_ROUNDING */
3016 {
3017 /* Otherwise make space on the stack and copy the data
3018 to the address of that space. */
3019
3020 /* Deduct words put into registers from the size we must copy. */
3021 if (partial != 0)
3022 {
3023 if (GET_CODE (size) == CONST_INT)
3024 size = GEN_INT (INTVAL (size) - used);
3025 else
3026 size = expand_binop (GET_MODE (size), sub_optab, size,
3027 GEN_INT (used), NULL_RTX, 0,
3028 OPTAB_LIB_WIDEN);
3029 }
3030
3031 /* Get the address of the stack space.
3032 In this case, we do not deal with EXTRA separately.
3033 A single stack adjust will do. */
3034 if (! args_addr)
3035 {
3036 temp = push_block (size, extra, where_pad == downward);
3037 extra = 0;
3038 }
3039 else if (GET_CODE (args_so_far) == CONST_INT)
3040 temp = memory_address (BLKmode,
3041 plus_constant (args_addr,
3042 skip + INTVAL (args_so_far)));
3043 else
3044 temp = memory_address (BLKmode,
3045 plus_constant (gen_rtx_PLUS (Pmode,
3046 args_addr,
3047 args_so_far),
3048 skip));
3049 if (current_function_check_memory_usage && ! in_check_memory_usage)
3050 {
3051 rtx target;
3052
3053 in_check_memory_usage = 1;
3054 target = copy_to_reg (temp);
3055 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3056 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3057 target, Pmode,
3058 XEXP (xinner, 0), Pmode,
3059 size, TYPE_MODE (sizetype));
3060 else
3061 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3062 target, Pmode,
3063 size, TYPE_MODE (sizetype),
3064 GEN_INT (MEMORY_USE_RW),
3065 TYPE_MODE (integer_type_node));
3066 in_check_memory_usage = 0;
3067 }
3068
3069 /* TEMP is the address of the block. Copy the data there. */
3070 if (GET_CODE (size) == CONST_INT
3071 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
3072 {
3073 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3074 INTVAL (size), align);
3075 goto ret;
3076 }
3077 else
3078 {
3079 rtx opalign = GEN_INT (align);
3080 enum machine_mode mode;
3081 rtx target = gen_rtx_MEM (BLKmode, temp);
3082
3083 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3084 mode != VOIDmode;
3085 mode = GET_MODE_WIDER_MODE (mode))
3086 {
3087 enum insn_code code = movstr_optab[(int) mode];
3088 insn_operand_predicate_fn pred;
3089
3090 if (code != CODE_FOR_nothing
3091 && ((GET_CODE (size) == CONST_INT
3092 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3093 <= (GET_MODE_MASK (mode) >> 1)))
3094 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3095 && (!(pred = insn_data[(int) code].operand[0].predicate)
3096 || ((*pred) (target, BLKmode)))
3097 && (!(pred = insn_data[(int) code].operand[1].predicate)
3098 || ((*pred) (xinner, BLKmode)))
3099 && (!(pred = insn_data[(int) code].operand[3].predicate)
3100 || ((*pred) (opalign, VOIDmode))))
3101 {
3102 rtx op2 = convert_to_mode (mode, size, 1);
3103 rtx last = get_last_insn ();
3104 rtx pat;
3105
3106 pred = insn_data[(int) code].operand[2].predicate;
3107 if (pred != 0 && ! (*pred) (op2, mode))
3108 op2 = copy_to_mode_reg (mode, op2);
3109
3110 pat = GEN_FCN ((int) code) (target, xinner,
3111 op2, opalign);
3112 if (pat)
3113 {
3114 emit_insn (pat);
3115 goto ret;
3116 }
3117 else
3118 delete_insns_since (last);
3119 }
3120 }
3121 }
3122
3123 #ifndef ACCUMULATE_OUTGOING_ARGS
3124 /* If the source is referenced relative to the stack pointer,
3125 copy it to another register to stabilize it. We do not need
3126 to do this if we know that we won't be changing sp. */
3127
3128 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3129 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3130 temp = copy_to_reg (temp);
3131 #endif
3132
3133 /* Make inhibit_defer_pop nonzero around the library call
3134 to force it to pop the bcopy-arguments right away. */
3135 NO_DEFER_POP;
3136 #ifdef TARGET_MEM_FUNCTIONS
3137 emit_library_call (memcpy_libfunc, 0,
3138 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3139 convert_to_mode (TYPE_MODE (sizetype),
3140 size, TREE_UNSIGNED (sizetype)),
3141 TYPE_MODE (sizetype));
3142 #else
3143 emit_library_call (bcopy_libfunc, 0,
3144 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3145 convert_to_mode (TYPE_MODE (integer_type_node),
3146 size,
3147 TREE_UNSIGNED (integer_type_node)),
3148 TYPE_MODE (integer_type_node));
3149 #endif
3150 OK_DEFER_POP;
3151 }
3152 }
3153 else if (partial > 0)
3154 {
3155 /* Scalar partly in registers. */
3156
3157 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3158 int i;
3159 int not_stack;
3160 /* # words of start of argument
3161 that we must make space for but need not store. */
3162 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3163 int args_offset = INTVAL (args_so_far);
3164 int skip;
3165
3166 /* Push padding now if padding above and stack grows down,
3167 or if padding below and stack grows up.
3168 But if space already allocated, this has already been done. */
3169 if (extra && args_addr == 0
3170 && where_pad != none && where_pad != stack_direction)
3171 anti_adjust_stack (GEN_INT (extra));
3172
3173 /* If we make space by pushing it, we might as well push
3174 the real data. Otherwise, we can leave OFFSET nonzero
3175 and leave the space uninitialized. */
3176 if (args_addr == 0)
3177 offset = 0;
3178
3179 /* Now NOT_STACK gets the number of words that we don't need to
3180 allocate on the stack. */
3181 not_stack = partial - offset;
3182
3183 /* If the partial register-part of the arg counts in its stack size,
3184 skip the part of stack space corresponding to the registers.
3185 Otherwise, start copying to the beginning of the stack space,
3186 by setting SKIP to 0. */
3187 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3188
3189 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3190 x = validize_mem (force_const_mem (mode, x));
3191
3192 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3193 SUBREGs of such registers are not allowed. */
3194 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3195 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3196 x = copy_to_reg (x);
3197
3198 /* Loop over all the words allocated on the stack for this arg. */
3199 /* We can do it by words, because any scalar bigger than a word
3200 has a size a multiple of a word. */
3201 #ifndef PUSH_ARGS_REVERSED
3202 for (i = not_stack; i < size; i++)
3203 #else
3204 for (i = size - 1; i >= not_stack; i--)
3205 #endif
3206 if (i >= not_stack + offset)
3207 emit_push_insn (operand_subword_force (x, i, mode),
3208 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3209 0, args_addr,
3210 GEN_INT (args_offset + ((i - not_stack + skip)
3211 * UNITS_PER_WORD)),
3212 reg_parm_stack_space, alignment_pad);
3213 }
3214 else
3215 {
3216 rtx addr;
3217 rtx target = NULL_RTX;
3218
3219 /* Push padding now if padding above and stack grows down,
3220 or if padding below and stack grows up.
3221 But if space already allocated, this has already been done. */
3222 if (extra && args_addr == 0
3223 && where_pad != none && where_pad != stack_direction)
3224 anti_adjust_stack (GEN_INT (extra));
3225
3226 #ifdef PUSH_ROUNDING
3227 if (args_addr == 0)
3228 addr = gen_push_operand ();
3229 else
3230 #endif
3231 {
3232 if (GET_CODE (args_so_far) == CONST_INT)
3233 addr
3234 = memory_address (mode,
3235 plus_constant (args_addr,
3236 INTVAL (args_so_far)));
3237 else
3238 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3239 args_so_far));
3240 target = addr;
3241 }
3242
3243 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3244
3245 if (current_function_check_memory_usage && ! in_check_memory_usage)
3246 {
3247 in_check_memory_usage = 1;
3248 if (target == 0)
3249 target = get_push_address (GET_MODE_SIZE (mode));
3250
3251 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3252 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3253 target, Pmode,
3254 XEXP (x, 0), Pmode,
3255 GEN_INT (GET_MODE_SIZE (mode)),
3256 TYPE_MODE (sizetype));
3257 else
3258 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3259 target, Pmode,
3260 GEN_INT (GET_MODE_SIZE (mode)),
3261 TYPE_MODE (sizetype),
3262 GEN_INT (MEMORY_USE_RW),
3263 TYPE_MODE (integer_type_node));
3264 in_check_memory_usage = 0;
3265 }
3266 }
3267
3268 ret:
3269 /* If part should go in registers, copy that part
3270 into the appropriate registers. Do this now, at the end,
3271 since mem-to-mem copies above may do function calls. */
3272 if (partial > 0 && reg != 0)
3273 {
3274 /* Handle calls that pass values in multiple non-contiguous locations.
3275 The Irix 6 ABI has examples of this. */
3276 if (GET_CODE (reg) == PARALLEL)
3277 emit_group_load (reg, x, -1, align); /* ??? size? */
3278 else
3279 move_block_to_reg (REGNO (reg), x, partial, mode);
3280 }
3281
3282 if (extra && args_addr == 0 && where_pad == stack_direction)
3283 anti_adjust_stack (GEN_INT (extra));
3284
3285 if (alignment_pad)
3286 anti_adjust_stack (alignment_pad);
3287 }
3288 \f
3289 /* Expand an assignment that stores the value of FROM into TO.
3290 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3291 (This may contain a QUEUED rtx;
3292 if the value is constant, this rtx is a constant.)
3293 Otherwise, the returned value is NULL_RTX.
3294
3295 SUGGEST_REG is no longer actually used.
3296 It used to mean, copy the value through a register
3297 and return that register, if that is possible.
3298 We now use WANT_VALUE to decide whether to do this. */
3299
3300 rtx
3301 expand_assignment (to, from, want_value, suggest_reg)
3302 tree to, from;
3303 int want_value;
3304 int suggest_reg ATTRIBUTE_UNUSED;
3305 {
3306 register rtx to_rtx = 0;
3307 rtx result;
3308
3309 /* Don't crash if the lhs of the assignment was erroneous. */
3310
3311 if (TREE_CODE (to) == ERROR_MARK)
3312 {
3313 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3314 return want_value ? result : NULL_RTX;
3315 }
3316
3317 /* Assignment of a structure component needs special treatment
3318 if the structure component's rtx is not simply a MEM.
3319 Assignment of an array element at a constant index, and assignment of
3320 an array element in an unaligned packed structure field, has the same
3321 problem. */
3322
3323 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3324 || TREE_CODE (to) == ARRAY_REF)
3325 {
3326 enum machine_mode mode1;
3327 int bitsize;
3328 int bitpos;
3329 tree offset;
3330 int unsignedp;
3331 int volatilep = 0;
3332 tree tem;
3333 int alignment;
3334
3335 push_temp_slots ();
3336 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3337 &unsignedp, &volatilep, &alignment);
3338
3339 /* If we are going to use store_bit_field and extract_bit_field,
3340 make sure to_rtx will be safe for multiple use. */
3341
3342 if (mode1 == VOIDmode && want_value)
3343 tem = stabilize_reference (tem);
3344
3345 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3346 if (offset != 0)
3347 {
3348 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3349
3350 if (GET_CODE (to_rtx) != MEM)
3351 abort ();
3352
3353 if (GET_MODE (offset_rtx) != ptr_mode)
3354 {
3355 #ifdef POINTERS_EXTEND_UNSIGNED
3356 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3357 #else
3358 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3359 #endif
3360 }
3361
3362 /* A constant address in TO_RTX can have VOIDmode; we must not try
3363 to call force_reg in that case. */
3364 if (GET_CODE (to_rtx) == MEM
3365 && GET_MODE (to_rtx) == BLKmode
3366 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3367 && bitsize
3368 && (bitpos % bitsize) == 0
3369 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3370 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3371 {
3372 rtx temp = change_address (to_rtx, mode1,
3373 plus_constant (XEXP (to_rtx, 0),
3374 (bitpos /
3375 BITS_PER_UNIT)));
3376 if (GET_CODE (XEXP (temp, 0)) == REG)
3377 to_rtx = temp;
3378 else
3379 to_rtx = change_address (to_rtx, mode1,
3380 force_reg (GET_MODE (XEXP (temp, 0)),
3381 XEXP (temp, 0)));
3382 bitpos = 0;
3383 }
3384
3385 to_rtx = change_address (to_rtx, VOIDmode,
3386 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3387 force_reg (ptr_mode,
3388 offset_rtx)));
3389 }
3390
3391 if (volatilep)
3392 {
3393 if (GET_CODE (to_rtx) == MEM)
3394 {
3395 /* When the offset is zero, to_rtx is the address of the
3396 structure we are storing into, and hence may be shared.
3397 We must make a new MEM before setting the volatile bit. */
3398 if (offset == 0)
3399 to_rtx = copy_rtx (to_rtx);
3400
3401 MEM_VOLATILE_P (to_rtx) = 1;
3402 }
3403 #if 0 /* This was turned off because, when a field is volatile
3404 in an object which is not volatile, the object may be in a register,
3405 and then we would abort over here. */
3406 else
3407 abort ();
3408 #endif
3409 }
3410
3411 if (TREE_CODE (to) == COMPONENT_REF
3412 && TREE_READONLY (TREE_OPERAND (to, 1)))
3413 {
3414 if (offset == 0)
3415 to_rtx = copy_rtx (to_rtx);
3416
3417 RTX_UNCHANGING_P (to_rtx) = 1;
3418 }
3419
3420 /* Check the access. */
3421 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3422 {
3423 rtx to_addr;
3424 int size;
3425 int best_mode_size;
3426 enum machine_mode best_mode;
3427
3428 best_mode = get_best_mode (bitsize, bitpos,
3429 TYPE_ALIGN (TREE_TYPE (tem)),
3430 mode1, volatilep);
3431 if (best_mode == VOIDmode)
3432 best_mode = QImode;
3433
3434 best_mode_size = GET_MODE_BITSIZE (best_mode);
3435 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3436 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3437 size *= GET_MODE_SIZE (best_mode);
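	  /* E.g. for a 20-bit field at bit offset 12 accessed in QImode,
	     TO_ADDR is advanced by 1 byte and SIZE becomes
	     CEIL (4 + 20, 8) * 1 == 3 bytes, covering every byte the
	     bit-field touches.  */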
3438
3439 /* Check the access right of the pointer. */
3440 if (size)
3441 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3442 to_addr, Pmode,
3443 GEN_INT (size), TYPE_MODE (sizetype),
3444 GEN_INT (MEMORY_USE_WO),
3445 TYPE_MODE (integer_type_node));
3446 }
3447
3448 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3449 (want_value
3450 /* Spurious cast makes HPUX compiler happy. */
3451 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3452 : VOIDmode),
3453 unsignedp,
3454 /* Required alignment of containing datum. */
3455 alignment,
3456 int_size_in_bytes (TREE_TYPE (tem)),
3457 get_alias_set (to));
3458 preserve_temp_slots (result);
3459 free_temp_slots ();
3460 pop_temp_slots ();
3461
3462 /* If the value is meaningful, convert RESULT to the proper mode.
3463 Otherwise, return nothing. */
3464 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3465 TYPE_MODE (TREE_TYPE (from)),
3466 result,
3467 TREE_UNSIGNED (TREE_TYPE (to)))
3468 : NULL_RTX);
3469 }
3470
3471 /* If the rhs is a function call and its value is not an aggregate,
3472 call the function before we start to compute the lhs.
3473 This is needed for correct code for cases such as
3474 val = setjmp (buf) on machines where reference to val
3475 requires loading up part of an address in a separate insn.
3476
3477 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3478 a promoted variable where the zero- or sign- extension needs to be done.
3479 Handling this in the normal way is safe because no computation is done
3480 before the call. */
3481 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3482 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3483 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3484 {
3485 rtx value;
3486
3487 push_temp_slots ();
3488 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3489 if (to_rtx == 0)
3490 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3491
3492 /* Handle calls that return values in multiple non-contiguous locations.
3493 The Irix 6 ABI has examples of this. */
3494 if (GET_CODE (to_rtx) == PARALLEL)
3495 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3496 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3497 else if (GET_MODE (to_rtx) == BLKmode)
3498 emit_block_move (to_rtx, value, expr_size (from),
3499 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3500 else
3501 {
3502 #ifdef POINTERS_EXTEND_UNSIGNED
3503 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3504 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3505 value = convert_memory_address (GET_MODE (to_rtx), value);
3506 #endif
3507 emit_move_insn (to_rtx, value);
3508 }
3509 preserve_temp_slots (to_rtx);
3510 free_temp_slots ();
3511 pop_temp_slots ();
3512 return want_value ? to_rtx : NULL_RTX;
3513 }
3514
3515 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3516 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3517
3518 if (to_rtx == 0)
3519 {
3520 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3521 if (GET_CODE (to_rtx) == MEM)
3522 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3523 }
3524
3525 /* Don't move directly into a return register. */
3526 if (TREE_CODE (to) == RESULT_DECL
3527 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3528 {
3529 rtx temp;
3530
3531 push_temp_slots ();
3532 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3533
3534 if (GET_CODE (to_rtx) == PARALLEL)
3535 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3536 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3537 else
3538 emit_move_insn (to_rtx, temp);
3539
3540 preserve_temp_slots (to_rtx);
3541 free_temp_slots ();
3542 pop_temp_slots ();
3543 return want_value ? to_rtx : NULL_RTX;
3544 }
3545
3546 /* In case we are returning the contents of an object which overlaps
3547 the place the value is being stored, use a safe function when copying
3548 a value through a pointer into a structure value return block. */
3549 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3550 && current_function_returns_struct
3551 && !current_function_returns_pcc_struct)
3552 {
3553 rtx from_rtx, size;
3554
3555 push_temp_slots ();
3556 size = expr_size (from);
3557 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3558 EXPAND_MEMORY_USE_DONT);
3559
3560 /* Copy the rights of the bitmap. */
3561 if (current_function_check_memory_usage)
3562 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3563 XEXP (to_rtx, 0), Pmode,
3564 XEXP (from_rtx, 0), Pmode,
3565 convert_to_mode (TYPE_MODE (sizetype),
3566 size, TREE_UNSIGNED (sizetype)),
3567 TYPE_MODE (sizetype));
3568
3569 #ifdef TARGET_MEM_FUNCTIONS
3570 emit_library_call (memcpy_libfunc, 0,
3571 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3572 XEXP (from_rtx, 0), Pmode,
3573 convert_to_mode (TYPE_MODE (sizetype),
3574 size, TREE_UNSIGNED (sizetype)),
3575 TYPE_MODE (sizetype));
3576 #else
3577 emit_library_call (bcopy_libfunc, 0,
3578 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3579 XEXP (to_rtx, 0), Pmode,
3580 convert_to_mode (TYPE_MODE (integer_type_node),
3581 size, TREE_UNSIGNED (integer_type_node)),
3582 TYPE_MODE (integer_type_node));
3583 #endif
3584
3585 preserve_temp_slots (to_rtx);
3586 free_temp_slots ();
3587 pop_temp_slots ();
3588 return want_value ? to_rtx : NULL_RTX;
3589 }
3590
3591 /* Compute FROM and store the value in the rtx we got. */
3592
3593 push_temp_slots ();
3594 result = store_expr (from, to_rtx, want_value);
3595 preserve_temp_slots (result);
3596 free_temp_slots ();
3597 pop_temp_slots ();
3598 return want_value ? result : NULL_RTX;
3599 }
3600
3601 /* Generate code for computing expression EXP,
3602 and storing the value into TARGET.
3603 TARGET may contain a QUEUED rtx.
3604
3605 If WANT_VALUE is nonzero, return a copy of the value
3606 not in TARGET, so that we can be sure to use the proper
3607 value in a containing expression even if TARGET has something
3608 else stored in it. If possible, we copy the value through a pseudo
3609 and return that pseudo. Or, if the value is constant, we try to
3610 return the constant. In some cases, we return a pseudo
3611 copied *from* TARGET.
3612
3613 If the mode is BLKmode then we may return TARGET itself.
3614 It turns out that in BLKmode it doesn't cause a problem,
3615 because C has no operators that could combine two different
3616 assignments into the same BLKmode object with different values
3617 with no sequence point. Will other languages need this to
3618 be more thorough?
3619
3620 If WANT_VALUE is 0, we return NULL, to make sure
3621 to catch quickly any cases where the caller uses the value
3622 and fails to set WANT_VALUE. */
3623
3624 rtx
3625 store_expr (exp, target, want_value)
3626 register tree exp;
3627 register rtx target;
3628 int want_value;
3629 {
3630 register rtx temp;
3631 int dont_return_target = 0;
3632
3633 if (TREE_CODE (exp) == COMPOUND_EXPR)
3634 {
3635 /* Perform first part of compound expression, then assign from second
3636 part. */
3637 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3638 emit_queue ();
3639 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3640 }
3641 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3642 {
3643 /* For conditional expression, get safe form of the target. Then
3644 test the condition, doing the appropriate assignment on either
3645 side. This avoids the creation of unnecessary temporaries.
3646 For non-BLKmode, it is more efficient not to do this. */
3647
3648 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3649
3650 emit_queue ();
3651 target = protect_from_queue (target, 1);
3652
3653 do_pending_stack_adjust ();
3654 NO_DEFER_POP;
3655 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3656 start_cleanup_deferral ();
3657 store_expr (TREE_OPERAND (exp, 1), target, 0);
3658 end_cleanup_deferral ();
3659 emit_queue ();
3660 emit_jump_insn (gen_jump (lab2));
3661 emit_barrier ();
3662 emit_label (lab1);
3663 start_cleanup_deferral ();
3664 store_expr (TREE_OPERAND (exp, 2), target, 0);
3665 end_cleanup_deferral ();
3666 emit_queue ();
3667 emit_label (lab2);
3668 OK_DEFER_POP;
3669
3670 return want_value ? target : NULL_RTX;
3671 }
3672 else if (queued_subexp_p (target))
3673 /* If target contains a postincrement, let's not risk
3674 using it as the place to generate the rhs. */
3675 {
3676 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3677 {
3678 /* Expand EXP into a new pseudo. */
3679 temp = gen_reg_rtx (GET_MODE (target));
3680 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3681 }
3682 else
3683 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3684
3685 /* If target is volatile, ANSI requires accessing the value
3686 *from* the target, if it is accessed. So make that happen.
3687 In no case return the target itself. */
3688 if (! MEM_VOLATILE_P (target) && want_value)
3689 dont_return_target = 1;
3690 }
3691 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3692 && GET_MODE (target) != BLKmode)
3693 /* If target is in memory and caller wants value in a register instead,
3694 arrange that. Pass TARGET as target for expand_expr so that,
3695 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3696 We know expand_expr will not use the target in that case.
3697 Don't do this if TARGET is volatile because we are supposed
3698 to write it and then read it. */
3699 {
3700 temp = expand_expr (exp, target, GET_MODE (target), 0);
3701 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3702 temp = copy_to_reg (temp);
3703 dont_return_target = 1;
3704 }
3705 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3706 /* If this is a scalar in a register that is stored in a wider mode
3707 than the declared mode, compute the result into its declared mode
3708 and then convert to the wider mode. Our value is the computed
3709 expression. */
3710 {
3711 /* If we don't want a value, we can do the conversion inside EXP,
3712 which will often result in some optimizations. Do the conversion
3713 in two steps: first change the signedness, if needed, then
3714 the extend. But don't do this if the type of EXP is a subtype
3715 of something else since then the conversion might involve
3716 more than just converting modes. */
3717 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3718 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3719 {
3720 if (TREE_UNSIGNED (TREE_TYPE (exp))
3721 != SUBREG_PROMOTED_UNSIGNED_P (target))
3722 exp
3723 = convert
3724 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3725 TREE_TYPE (exp)),
3726 exp);
3727
3728 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3729 SUBREG_PROMOTED_UNSIGNED_P (target)),
3730 exp);
3731 }
3732
3733 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3734
3735 /* If TEMP is a volatile MEM and we want a result value, make
3736 the access now so it gets done only once. Likewise if
3737 it contains TARGET. */
3738 if (GET_CODE (temp) == MEM && want_value
3739 && (MEM_VOLATILE_P (temp)
3740 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3741 temp = copy_to_reg (temp);
3742
3743 /* If TEMP is a VOIDmode constant, use convert_modes to make
3744 sure that we properly convert it. */
3745 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3746 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3747 TYPE_MODE (TREE_TYPE (exp)), temp,
3748 SUBREG_PROMOTED_UNSIGNED_P (target));
3749
3750 convert_move (SUBREG_REG (target), temp,
3751 SUBREG_PROMOTED_UNSIGNED_P (target));
3752
3753 /* If we promoted a constant, change the mode back down to match
3754 target. Otherwise, the caller might get confused by a result whose
3755 mode is larger than expected. */
3756
3757 if (want_value && GET_MODE (temp) != GET_MODE (target)
3758 && GET_MODE (temp) != VOIDmode)
3759 {
3760 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3761 SUBREG_PROMOTED_VAR_P (temp) = 1;
3762 SUBREG_PROMOTED_UNSIGNED_P (temp)
3763 = SUBREG_PROMOTED_UNSIGNED_P (target);
3764 }
3765
3766 return want_value ? temp : NULL_RTX;
3767 }
3768 else
3769 {
3770 temp = expand_expr (exp, target, GET_MODE (target), 0);
3771 /* Return TARGET if it's a specified hardware register.
3772 If TARGET is a volatile mem ref, either return TARGET
3773 or return a reg copied *from* TARGET; ANSI requires this.
3774
3775 Otherwise, if TEMP is not TARGET, return TEMP
3776 if it is constant (for efficiency),
3777 or if we really want the correct value. */
3778 if (!(target && GET_CODE (target) == REG
3779 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3780 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3781 && ! rtx_equal_p (temp, target)
3782 && (CONSTANT_P (temp) || want_value))
3783 dont_return_target = 1;
3784 }
3785
3786 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3787 the same as that of TARGET, adjust the constant. This is needed, for
3788 example, in case it is a CONST_DOUBLE and we want only a word-sized
3789 value. */
3790 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3791 && TREE_CODE (exp) != ERROR_MARK
3792 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3793 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3794 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3795
3796 if (current_function_check_memory_usage
3797 && GET_CODE (target) == MEM
3798 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3799 {
3800 if (GET_CODE (temp) == MEM)
3801 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3802 XEXP (target, 0), Pmode,
3803 XEXP (temp, 0), Pmode,
3804 expr_size (exp), TYPE_MODE (sizetype));
3805 else
3806 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3807 XEXP (target, 0), Pmode,
3808 expr_size (exp), TYPE_MODE (sizetype),
3809 GEN_INT (MEMORY_USE_WO),
3810 TYPE_MODE (integer_type_node));
3811 }
3812
3813 /* If value was not generated in the target, store it there.
3814 Convert the value to TARGET's type first if necessary. */
3815 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3816 one or both of them are volatile memory refs, we have to distinguish
3817 two cases:
3818 - expand_expr has used TARGET. In this case, we must not generate
3819 another copy. This can be detected by TEMP and TARGET comparing
3820 equal according to ==.
3821 - expand_expr has not used TARGET - that means that the source just
3822 happens to have the same RTX form. Since TEMP will have been created
3823 by expand_expr, it will compare unequal to TARGET according to ==.
3824 We must generate a copy in this case, to reach the correct number
3825 of volatile memory references. */
3826
3827 if ((! rtx_equal_p (temp, target)
3828 || (temp != target && (side_effects_p (temp)
3829 || side_effects_p (target))))
3830 && TREE_CODE (exp) != ERROR_MARK)
3831 {
3832 target = protect_from_queue (target, 1);
3833 if (GET_MODE (temp) != GET_MODE (target)
3834 && GET_MODE (temp) != VOIDmode)
3835 {
3836 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3837 if (dont_return_target)
3838 {
3839 /* In this case, we will return TEMP,
3840 so make sure it has the proper mode.
3841 But don't forget to store the value into TARGET. */
3842 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3843 emit_move_insn (target, temp);
3844 }
3845 else
3846 convert_move (target, temp, unsignedp);
3847 }
3848
3849 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3850 {
3851 /* Handle copying a string constant into an array.
3852 The string constant may be shorter than the array.
3853 So copy just the string's actual length, and clear the rest. */
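	    /* For example, for a hypothetical `char buf[8] = "hi";' the
	       string constant occupies 3 bytes (counting the terminating
	       null), so 3 bytes are block-copied and the remaining 5
	       bytes of the array are cleared below.  */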
3854 rtx size;
3855 rtx addr;
3856
3857 /* Get the size of the data type of the string,
3858 which is actually the size of the target. */
3859 size = expr_size (exp);
3860 if (GET_CODE (size) == CONST_INT
3861 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3862 emit_block_move (target, temp, size,
3863 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3864 else
3865 {
3866 /* Compute the size of the data to copy from the string. */
3867 tree copy_size
3868 = size_binop (MIN_EXPR,
3869 make_tree (sizetype, size),
3870 convert (sizetype,
3871 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3872 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3873 VOIDmode, 0);
3874 rtx label = 0;
3875
3876 /* Copy that much. */
3877 emit_block_move (target, temp, copy_size_rtx,
3878 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3879
3880 /* Figure out how much is left in TARGET that we have to clear.
3881 Do all calculations in ptr_mode. */
3882
3883 addr = XEXP (target, 0);
3884 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3885
3886 if (GET_CODE (copy_size_rtx) == CONST_INT)
3887 {
3888 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3889 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3890 }
3891 else
3892 {
3893 addr = force_reg (ptr_mode, addr);
3894 addr = expand_binop (ptr_mode, add_optab, addr,
3895 copy_size_rtx, NULL_RTX, 0,
3896 OPTAB_LIB_WIDEN);
3897
3898 size = expand_binop (ptr_mode, sub_optab, size,
3899 copy_size_rtx, NULL_RTX, 0,
3900 OPTAB_LIB_WIDEN);
3901
3902 label = gen_label_rtx ();
3903 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3904 GET_MODE (size), 0, 0, label);
3905 }
3906
3907 if (size != const0_rtx)
3908 {
3909 /* Be sure we can write on ADDR. */
3910 if (current_function_check_memory_usage)
3911 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3912 addr, Pmode,
3913 size, TYPE_MODE (sizetype),
3914 GEN_INT (MEMORY_USE_WO),
3915 TYPE_MODE (integer_type_node));
3916 #ifdef TARGET_MEM_FUNCTIONS
3917 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3918 addr, ptr_mode,
3919 const0_rtx, TYPE_MODE (integer_type_node),
3920 convert_to_mode (TYPE_MODE (sizetype),
3921 size,
3922 TREE_UNSIGNED (sizetype)),
3923 TYPE_MODE (sizetype));
3924 #else
3925 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3926 addr, ptr_mode,
3927 convert_to_mode (TYPE_MODE (integer_type_node),
3928 size,
3929 TREE_UNSIGNED (integer_type_node)),
3930 TYPE_MODE (integer_type_node));
3931 #endif
3932 }
3933
3934 if (label)
3935 emit_label (label);
3936 }
3937 }
3938 /* Handle calls that return values in multiple non-contiguous locations.
3939 The Irix 6 ABI has examples of this. */
3940 else if (GET_CODE (target) == PARALLEL)
3941 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3942 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3943 else if (GET_MODE (temp) == BLKmode)
3944 emit_block_move (target, temp, expr_size (exp),
3945 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3946 else
3947 emit_move_insn (target, temp);
3948 }
3949
3950 /* If we don't want a value, return NULL_RTX. */
3951 if (! want_value)
3952 return NULL_RTX;
3953
3954 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3955 ??? The latter test doesn't seem to make sense. */
3956 else if (dont_return_target && GET_CODE (temp) != MEM)
3957 return temp;
3958
3959 /* Return TARGET itself if it is a hard register. */
3960 else if (want_value && GET_MODE (target) != BLKmode
3961 && ! (GET_CODE (target) == REG
3962 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3963 return copy_to_reg (target);
3964
3965 else
3966 return target;
3967 }
3968 \f
3969 /* Return 1 if EXP just contains zeros. */
3970
3971 static int
3972 is_zeros_p (exp)
3973 tree exp;
3974 {
3975 tree elt;
3976
3977 switch (TREE_CODE (exp))
3978 {
3979 case CONVERT_EXPR:
3980 case NOP_EXPR:
3981 case NON_LVALUE_EXPR:
3982 return is_zeros_p (TREE_OPERAND (exp, 0));
3983
3984 case INTEGER_CST:
3985 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3986
3987 case COMPLEX_CST:
3988 return
3989 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3990
3991 case REAL_CST:
3992 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3993
3994 case CONSTRUCTOR:
3995 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3996 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3997 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3998 if (! is_zeros_p (TREE_VALUE (elt)))
3999 return 0;
4000
4001 return 1;
4002
4003 default:
4004 return 0;
4005 }
4006 }
4007
4008 /* Return 1 if EXP contains mostly (3/4) zeros. */
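/* For example (an illustrative count, not taken from real input): a
   CONSTRUCTOR with 8 elements of which 6 are zero passes the test
   below, since 4 * 6 >= 3 * 8; one with only 5 zero elements does not,
   since 4 * 5 < 3 * 8.  */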
4009
4010 static int
4011 mostly_zeros_p (exp)
4012 tree exp;
4013 {
4014 if (TREE_CODE (exp) == CONSTRUCTOR)
4015 {
4016 int elts = 0, zeros = 0;
4017 tree elt = CONSTRUCTOR_ELTS (exp);
4018 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4019 {
4020 /* If there are no ranges of true bits, it is all zero. */
4021 return elt == NULL_TREE;
4022 }
4023 for (; elt; elt = TREE_CHAIN (elt))
4024 {
4025 /* We do not handle the case where the index is a RANGE_EXPR,
4026 so the statistic will be somewhat inaccurate.
4027 We do make a more accurate count in store_constructor itself,
4028 and since this function is only used for nested array elements,
4029 this should be close enough. */
4030 if (mostly_zeros_p (TREE_VALUE (elt)))
4031 zeros++;
4032 elts++;
4033 }
4034
4035 return 4 * zeros >= 3 * elts;
4036 }
4037
4038 return is_zeros_p (exp);
4039 }
4040 \f
4041 /* Helper function for store_constructor.
4042 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4043 TYPE is the type of the CONSTRUCTOR, not the element type.
4044 ALIGN and CLEARED are as for store_constructor.
4045
4046 This provides a recursive shortcut back to store_constructor when it isn't
4047 necessary to go through store_field. This is so that we can pass through
4048 the cleared field to let store_constructor know that we may not have to
4049 clear a substructure if the outer structure has already been cleared. */
4050
4051 static void
4052 store_constructor_field (target, bitsize, bitpos,
4053 mode, exp, type, align, cleared)
4054 rtx target;
4055 int bitsize, bitpos;
4056 enum machine_mode mode;
4057 tree exp, type;
4058 int align;
4059 int cleared;
4060 {
4061 if (TREE_CODE (exp) == CONSTRUCTOR
4062 && bitpos % BITS_PER_UNIT == 0
4063 /* If we have a non-zero bitpos for a register target, then we just
4064 let store_field do the bitfield handling. This is unlikely to
4065 generate unnecessary clear instructions anyway. */
4066 && (bitpos == 0 || GET_CODE (target) == MEM))
4067 {
4068 if (bitpos != 0)
4069 target
4070 = change_address (target,
4071 GET_MODE (target) == BLKmode
4072 || 0 != (bitpos
4073 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4074 ? BLKmode : VOIDmode,
4075 plus_constant (XEXP (target, 0),
4076 bitpos / BITS_PER_UNIT));
4077 store_constructor (exp, target, align, cleared);
4078 }
4079 else
4080 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4081 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4082 int_size_in_bytes (type), cleared);
4083 }
4084
4085 /* Store the value of constructor EXP into the rtx TARGET.
4086 TARGET is either a REG or a MEM.
4087 ALIGN is the maximum known alignment for TARGET, in bits.
4088 CLEARED is true if TARGET is known to have been zeroed. */
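/* As a small illustration (the declaration is hypothetical): for
   `struct s { int a, b, c, d; } x = { 1 };' the constructor supplies
   fewer elements than the type has fields, so the whole object is
   cleared first and only the field `a' is then stored explicitly.  */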
4089
4090 static void
4091 store_constructor (exp, target, align, cleared)
4092 tree exp;
4093 rtx target;
4094 int align;
4095 int cleared;
4096 {
4097 tree type = TREE_TYPE (exp);
4098 #ifdef WORD_REGISTER_OPERATIONS
4099 rtx exp_size = expr_size (exp);
4100 #endif
4101
4102 /* We know our target cannot conflict, since safe_from_p has been called. */
4103 #if 0
4104 /* Don't try copying piece by piece into a hard register
4105 since that is vulnerable to being clobbered by EXP.
4106 Instead, construct in a pseudo register and then copy it all. */
4107 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4108 {
4109 rtx temp = gen_reg_rtx (GET_MODE (target));
4110 store_constructor (exp, temp, 0);
4111 emit_move_insn (target, temp);
4112 return;
4113 }
4114 #endif
4115
4116 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4117 || TREE_CODE (type) == QUAL_UNION_TYPE)
4118 {
4119 register tree elt;
4120
4121 /* Inform later passes that the whole union value is dead. */
4122 if ((TREE_CODE (type) == UNION_TYPE
4123 || TREE_CODE (type) == QUAL_UNION_TYPE)
4124 && ! cleared)
4125 {
4126 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4127
4128 /* If the constructor is empty, clear the union. */
4129 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4130 clear_storage (target, expr_size (exp),
4131 TYPE_ALIGN (type) / BITS_PER_UNIT);
4132 }
4133
4134 /* If we are building a static constructor into a register,
4135 set the initial value as zero so we can fold the value into
4136 a constant. But if more than one register is involved,
4137 this probably loses. */
4138 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4139 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4140 {
4141 if (! cleared)
4142 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4143
4144 cleared = 1;
4145 }
4146
4147 /* If the constructor has fewer fields than the structure
4148 or if we are initializing the structure to mostly zeros,
4149 clear the whole structure first. */
4150 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4151 != list_length (TYPE_FIELDS (type)))
4152 || mostly_zeros_p (exp))
4153 {
4154 if (! cleared)
4155 clear_storage (target, expr_size (exp),
4156 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4157
4158 cleared = 1;
4159 }
4160 else if (! cleared)
4161 /* Inform later passes that the old value is dead. */
4162 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4163
4164 /* Store each element of the constructor into
4165 the corresponding field of TARGET. */
4166
4167 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4168 {
4169 register tree field = TREE_PURPOSE (elt);
4170 #ifdef WORD_REGISTER_OPERATIONS
4171 tree value = TREE_VALUE (elt);
4172 #endif
4173 register enum machine_mode mode;
4174 int bitsize;
4175 int bitpos = 0;
4176 int unsignedp;
4177 tree pos, constant = 0, offset = 0;
4178 rtx to_rtx = target;
4179
4180 /* Just ignore missing fields.
4181 We cleared the whole structure, above,
4182 if any fields are missing. */
4183 if (field == 0)
4184 continue;
4185
4186 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4187 continue;
4188
4189 if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
4190 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4191 else
4192 bitsize = -1;
4193
4194 unsignedp = TREE_UNSIGNED (field);
4195 mode = DECL_MODE (field);
4196 if (DECL_BIT_FIELD (field))
4197 mode = VOIDmode;
4198
4199 pos = DECL_FIELD_BITPOS (field);
4200 if (TREE_CODE (pos) == INTEGER_CST)
4201 constant = pos;
4202 else if (TREE_CODE (pos) == PLUS_EXPR
4203 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4204 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4205 else
4206 offset = pos;
4207
4208 if (constant)
4209 bitpos = TREE_INT_CST_LOW (constant);
4210
4211 if (offset)
4212 {
4213 rtx offset_rtx;
4214
4215 if (contains_placeholder_p (offset))
4216 offset = build (WITH_RECORD_EXPR, sizetype,
4217 offset, make_tree (TREE_TYPE (exp), target));
4218
4219 offset = size_binop (EXACT_DIV_EXPR, offset,
4220 size_int (BITS_PER_UNIT));
4221
4222 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4223 if (GET_CODE (to_rtx) != MEM)
4224 abort ();
4225
4226 if (GET_MODE (offset_rtx) != ptr_mode)
4227 {
4228 #ifdef POINTERS_EXTEND_UNSIGNED
4229 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4230 #else
4231 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4232 #endif
4233 }
4234
4235 to_rtx
4236 = change_address (to_rtx, VOIDmode,
4237 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4238 force_reg (ptr_mode,
4239 offset_rtx)));
4240 }
4241
4242 if (TREE_READONLY (field))
4243 {
4244 if (GET_CODE (to_rtx) == MEM)
4245 to_rtx = copy_rtx (to_rtx);
4246
4247 RTX_UNCHANGING_P (to_rtx) = 1;
4248 }
4249
4250 #ifdef WORD_REGISTER_OPERATIONS
4251 /* If this initializes a field that is smaller than a word, at the
4252 start of a word, try to widen it to a full word.
4253 This special case allows us to output C++ member function
4254 initializations in a form that the optimizers can understand. */
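      /* For instance (sizes here are only illustrative): on a 32-bit
	 target, a 16-bit integer field placed at bit 0 of a word is
	 widened to a full-word store of the constant, possibly shifted
	 for big-endian byte order, which the optimizers handle better
	 than a bit-field insertion.  */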
4255 if (constant
4256 && GET_CODE (target) == REG
4257 && bitsize < BITS_PER_WORD
4258 && bitpos % BITS_PER_WORD == 0
4259 && GET_MODE_CLASS (mode) == MODE_INT
4260 && TREE_CODE (value) == INTEGER_CST
4261 && GET_CODE (exp_size) == CONST_INT
4262 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4263 {
4264 tree type = TREE_TYPE (value);
4265 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4266 {
4267 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4268 value = convert (type, value);
4269 }
4270 if (BYTES_BIG_ENDIAN)
4271 value
4272 = fold (build (LSHIFT_EXPR, type, value,
4273 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4274 bitsize = BITS_PER_WORD;
4275 mode = word_mode;
4276 }
4277 #endif
4278 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4279 TREE_VALUE (elt), type,
4280 MIN (align,
4281 DECL_ALIGN (TREE_PURPOSE (elt))),
4282 cleared);
4283 }
4284 }
4285 else if (TREE_CODE (type) == ARRAY_TYPE)
4286 {
4287 register tree elt;
4288 register int i;
4289 int need_to_clear;
4290 tree domain = TYPE_DOMAIN (type);
4291 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4292 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4293 tree elttype = TREE_TYPE (type);
4294
4295 /* If the constructor has fewer elements than the array,
4296 clear the whole array first. Similarly if this is a
4297 static constructor of a non-BLKmode object. */
4298 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4299 need_to_clear = 1;
4300 else
4301 {
4302 HOST_WIDE_INT count = 0, zero_count = 0;
4303 need_to_clear = 0;
4304 /* This loop is a more accurate version of the loop in
4305 mostly_zeros_p (it handles RANGE_EXPR in an index).
4306 It is also needed to check for missing elements. */
4307 for (elt = CONSTRUCTOR_ELTS (exp);
4308 elt != NULL_TREE;
4309 elt = TREE_CHAIN (elt))
4310 {
4311 tree index = TREE_PURPOSE (elt);
4312 HOST_WIDE_INT this_node_count;
4313 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4314 {
4315 tree lo_index = TREE_OPERAND (index, 0);
4316 tree hi_index = TREE_OPERAND (index, 1);
4317 if (TREE_CODE (lo_index) != INTEGER_CST
4318 || TREE_CODE (hi_index) != INTEGER_CST)
4319 {
4320 need_to_clear = 1;
4321 break;
4322 }
4323 this_node_count = TREE_INT_CST_LOW (hi_index)
4324 - TREE_INT_CST_LOW (lo_index) + 1;
4325 }
4326 else
4327 this_node_count = 1;
4328 count += this_node_count;
4329 if (mostly_zeros_p (TREE_VALUE (elt)))
4330 zero_count += this_node_count;
4331 }
4332 /* Clear the entire array first if there are any missing elements,
4333 or if the incidence of zero elements is >= 75%. */
4334 if (count < maxelt - minelt + 1
4335 || 4 * zero_count >= 3 * count)
4336 need_to_clear = 1;
4337 }
4338 if (need_to_clear)
4339 {
4340 if (! cleared)
4341 clear_storage (target, expr_size (exp),
4342 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4343 cleared = 1;
4344 }
4345 else
4346 /* Inform later passes that the old value is dead. */
4347 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4348
4349 /* Store each element of the constructor into
4350 the corresponding element of TARGET, determined
4351 by counting the elements. */
4352 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4353 elt;
4354 elt = TREE_CHAIN (elt), i++)
4355 {
4356 register enum machine_mode mode;
4357 int bitsize;
4358 int bitpos;
4359 int unsignedp;
4360 tree value = TREE_VALUE (elt);
4361 int align = TYPE_ALIGN (TREE_TYPE (value));
4362 tree index = TREE_PURPOSE (elt);
4363 rtx xtarget = target;
4364
4365 if (cleared && is_zeros_p (value))
4366 continue;
4367
4368 unsignedp = TREE_UNSIGNED (elttype);
4369 mode = TYPE_MODE (elttype);
4370 if (mode == BLKmode)
4371 {
4372 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4373 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4374 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4375 else
4376 bitsize = -1;
4377 }
4378 else
4379 bitsize = GET_MODE_BITSIZE (mode);
4380
4381 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4382 {
4383 tree lo_index = TREE_OPERAND (index, 0);
4384 tree hi_index = TREE_OPERAND (index, 1);
4385 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4386 struct nesting *loop;
4387 HOST_WIDE_INT lo, hi, count;
4388 tree position;
4389
4390 /* If the range is constant and "small", unroll the loop. */
4391 if (TREE_CODE (lo_index) == INTEGER_CST
4392 && TREE_CODE (hi_index) == INTEGER_CST
4393 && (lo = TREE_INT_CST_LOW (lo_index),
4394 hi = TREE_INT_CST_LOW (hi_index),
4395 count = hi - lo + 1,
4396 (GET_CODE (target) != MEM
4397 || count <= 2
4398 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4399 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4400 <= 40 * 8))))
4401 {
4402 lo -= minelt; hi -= minelt;
4403 for (; lo <= hi; lo++)
4404 {
4405 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4406 store_constructor_field (target, bitsize, bitpos, mode,
4407 value, type, align, cleared);
4408 }
4409 }
4410 else
4411 {
4412 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4413 loop_top = gen_label_rtx ();
4414 loop_end = gen_label_rtx ();
4415
4416 unsignedp = TREE_UNSIGNED (domain);
4417
4418 index = build_decl (VAR_DECL, NULL_TREE, domain);
4419
4420 DECL_RTL (index) = index_r
4421 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4422 &unsignedp, 0));
4423
4424 if (TREE_CODE (value) == SAVE_EXPR
4425 && SAVE_EXPR_RTL (value) == 0)
4426 {
4427 /* Make sure value gets expanded once before the
4428 loop. */
4429 expand_expr (value, const0_rtx, VOIDmode, 0);
4430 emit_queue ();
4431 }
4432 store_expr (lo_index, index_r, 0);
4433 loop = expand_start_loop (0);
4434
4435 /* Assign value to element index. */
4436 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4437 size_int (BITS_PER_UNIT));
4438 position = size_binop (MULT_EXPR,
4439 size_binop (MINUS_EXPR, index,
4440 TYPE_MIN_VALUE (domain)),
4441 position);
4442 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4443 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4444 xtarget = change_address (target, mode, addr);
4445 if (TREE_CODE (value) == CONSTRUCTOR)
4446 store_constructor (value, xtarget, align, cleared);
4447 else
4448 store_expr (value, xtarget, 0);
4449
4450 expand_exit_loop_if_false (loop,
4451 build (LT_EXPR, integer_type_node,
4452 index, hi_index));
4453
4454 expand_increment (build (PREINCREMENT_EXPR,
4455 TREE_TYPE (index),
4456 index, integer_one_node), 0, 0);
4457 expand_end_loop ();
4458 emit_label (loop_end);
4459
4460 /* Needed by stupid register allocation, to extend the
4461 lifetime of pseudo-regs used by TARGET past the end
4462 of the loop. */
4463 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4464 }
4465 }
4466 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4467 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4468 {
4469 rtx pos_rtx, addr;
4470 tree position;
4471
4472 if (index == 0)
4473 index = size_int (i);
4474
4475 if (minelt)
4476 index = size_binop (MINUS_EXPR, index,
4477 TYPE_MIN_VALUE (domain));
4478 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4479 size_int (BITS_PER_UNIT));
4480 position = size_binop (MULT_EXPR, index, position);
4481 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4482 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4483 xtarget = change_address (target, mode, addr);
4484 store_expr (value, xtarget, 0);
4485 }
4486 else
4487 {
4488 if (index != 0)
4489 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4490 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4491 else
4492 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4493 store_constructor_field (target, bitsize, bitpos, mode, value,
4494 type, align, cleared);
4495 }
4496 }
4497 }
4498 /* Set constructor assignments. */
4499 else if (TREE_CODE (type) == SET_TYPE)
4500 {
4501 tree elt = CONSTRUCTOR_ELTS (exp);
4502 int nbytes = int_size_in_bytes (type), nbits;
4503 tree domain = TYPE_DOMAIN (type);
4504 tree domain_min, domain_max, bitlength;
4505
4506 /* The default implementation strategy is to extract the constant
4507 parts of the constructor, use that to initialize the target,
4508 and then "or" in whatever non-constant ranges we need in addition.
4509
4510 If a large set is all zero or all ones, it is
4511 probably better to set it using memset (if available) or bzero.
4512 Also, if a large set has just a single range, it may also be
4513 better to first clear the whole set (using bzero/memset), and
4514 then set the bits we want. */
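   /* As an illustration (assuming a Pascal-style set type on a
      little-endian target with 32-bit set words): a constant
      constructor for `set of 0..31' containing only the range [2..4]
      is emitted below as a single word store of the value 0x1c, while
      a range whose bounds are not compile-time constants is handled
      by the __setbits library call instead.  */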
4515
4516 /* Check for all zeros. */
4517 if (elt == NULL_TREE)
4518 {
4519 if (!cleared)
4520 clear_storage (target, expr_size (exp),
4521 TYPE_ALIGN (type) / BITS_PER_UNIT);
4522 return;
4523 }
4524
4525 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4526 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4527 bitlength = size_binop (PLUS_EXPR,
4528 size_binop (MINUS_EXPR, domain_max, domain_min),
4529 size_one_node);
4530
4531 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4532 abort ();
4533 nbits = TREE_INT_CST_LOW (bitlength);
4534
4535 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4536 are "complicated" (more than one range), initialize (the
4537 constant parts) by copying from a constant. */
4538 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4539 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4540 {
4541 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4542 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4543 char *bit_buffer = (char *) alloca (nbits);
4544 HOST_WIDE_INT word = 0;
4545 int bit_pos = 0;
4546 int ibit = 0;
4547 int offset = 0; /* In bytes from beginning of set. */
4548 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4549 for (;;)
4550 {
4551 if (bit_buffer[ibit])
4552 {
4553 if (BYTES_BIG_ENDIAN)
4554 word |= (1 << (set_word_size - 1 - bit_pos));
4555 else
4556 word |= 1 << bit_pos;
4557 }
4558 bit_pos++; ibit++;
4559 if (bit_pos >= set_word_size || ibit == nbits)
4560 {
4561 if (word != 0 || ! cleared)
4562 {
4563 rtx datum = GEN_INT (word);
4564 rtx to_rtx;
4565 /* The assumption here is that it is safe to use
4566 XEXP if the set is multi-word, but not if
4567 it's single-word. */
4568 if (GET_CODE (target) == MEM)
4569 {
4570 to_rtx = plus_constant (XEXP (target, 0), offset);
4571 to_rtx = change_address (target, mode, to_rtx);
4572 }
4573 else if (offset == 0)
4574 to_rtx = target;
4575 else
4576 abort ();
4577 emit_move_insn (to_rtx, datum);
4578 }
4579 if (ibit == nbits)
4580 break;
4581 word = 0;
4582 bit_pos = 0;
4583 offset += set_word_size / BITS_PER_UNIT;
4584 }
4585 }
4586 }
4587 else if (!cleared)
4588 {
4589 /* Don't bother clearing storage if the set is all ones. */
4590 if (TREE_CHAIN (elt) != NULL_TREE
4591 || (TREE_PURPOSE (elt) == NULL_TREE
4592 ? nbits != 1
4593 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4594 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4595 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4596 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4597 != nbits))))
4598 clear_storage (target, expr_size (exp),
4599 TYPE_ALIGN (type) / BITS_PER_UNIT);
4600 }
4601
4602 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4603 {
4604 /* start of range of element or NULL */
4605 tree startbit = TREE_PURPOSE (elt);
4606 /* end of range of element, or element value */
4607 tree endbit = TREE_VALUE (elt);
4608 #ifdef TARGET_MEM_FUNCTIONS
4609 HOST_WIDE_INT startb, endb;
4610 #endif
4611 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4612
4613 bitlength_rtx = expand_expr (bitlength,
4614 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4615
4616 /* handle non-range tuple element like [ expr ] */
4617 if (startbit == NULL_TREE)
4618 {
4619 startbit = save_expr (endbit);
4620 endbit = startbit;
4621 }
4622 startbit = convert (sizetype, startbit);
4623 endbit = convert (sizetype, endbit);
4624 if (! integer_zerop (domain_min))
4625 {
4626 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4627 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4628 }
4629 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4630 EXPAND_CONST_ADDRESS);
4631 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4632 EXPAND_CONST_ADDRESS);
4633
4634 if (REG_P (target))
4635 {
4636 targetx = assign_stack_temp (GET_MODE (target),
4637 GET_MODE_SIZE (GET_MODE (target)),
4638 0);
4639 emit_move_insn (targetx, target);
4640 }
4641 else if (GET_CODE (target) == MEM)
4642 targetx = target;
4643 else
4644 abort ();
4645
4646 #ifdef TARGET_MEM_FUNCTIONS
4647 /* Optimization: If startbit and endbit are
4648 constants divisible by BITS_PER_UNIT,
4649 call memset instead. */
4650 if (TREE_CODE (startbit) == INTEGER_CST
4651 && TREE_CODE (endbit) == INTEGER_CST
4652 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4653 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4654 {
4655 emit_library_call (memset_libfunc, 0,
4656 VOIDmode, 3,
4657 plus_constant (XEXP (targetx, 0),
4658 startb / BITS_PER_UNIT),
4659 Pmode,
4660 constm1_rtx, TYPE_MODE (integer_type_node),
4661 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4662 TYPE_MODE (sizetype));
4663 }
4664 else
4665 #endif
4666 {
4667 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4668 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4669 bitlength_rtx, TYPE_MODE (sizetype),
4670 startbit_rtx, TYPE_MODE (sizetype),
4671 endbit_rtx, TYPE_MODE (sizetype));
4672 }
4673 if (REG_P (target))
4674 emit_move_insn (target, targetx);
4675 }
4676 }
4677
4678 else
4679 abort ();
4680 }
4681
4682 /* Store the value of EXP (an expression tree)
4683 into a subfield of TARGET which has mode MODE and occupies
4684 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4685 If MODE is VOIDmode, it means that we are storing into a bit-field.
4686
4687 If VALUE_MODE is VOIDmode, return nothing in particular.
4688 UNSIGNEDP is not used in this case.
4689
4690 Otherwise, return an rtx for the value stored. This rtx
4691 has mode VALUE_MODE if that is convenient to do.
4692 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4693
4694 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4695 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4696
4697 ALIAS_SET is the alias set for the destination. This value will
4698 (in general) be different from that for TARGET, since TARGET is a
4699 reference to the containing structure. */
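/* A small illustration (the field layout is hypothetical): storing into
   a 3-bit bit-field that begins 10 bits into its containing structure
   reaches this function with BITSIZE == 3, BITPOS == 10 and MODE ==
   VOIDmode, so the store is performed with store_bit_field below rather
   than through an ordinary memory reference.  */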
4700
4701 static rtx
4702 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4703 unsignedp, align, total_size, alias_set)
4704 rtx target;
4705 int bitsize, bitpos;
4706 enum machine_mode mode;
4707 tree exp;
4708 enum machine_mode value_mode;
4709 int unsignedp;
4710 int align;
4711 int total_size;
4712 int alias_set;
4713 {
4714 HOST_WIDE_INT width_mask = 0;
4715
4716 if (TREE_CODE (exp) == ERROR_MARK)
4717 return const0_rtx;
4718
4719 if (bitsize < HOST_BITS_PER_WIDE_INT)
4720 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4721
4722 /* If we are storing into an unaligned field of an aligned union that is
4723 in a register, we may have the mode of TARGET being an integer mode but
4724 MODE == BLKmode. In that case, get an aligned object whose size and
4725 alignment are the same as TARGET and store TARGET into it (we can avoid
4726 the store if the field being stored is the entire width of TARGET). Then
4727 call ourselves recursively to store the field into a BLKmode version of
4728 that object. Finally, load from the object into TARGET. This is not
4729 very efficient in general, but should only be slightly more expensive
4730 than the otherwise-required unaligned accesses. Perhaps this can be
4731 cleaned up later. */
4732
4733 if (mode == BLKmode
4734 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4735 {
4736 rtx object = assign_stack_temp (GET_MODE (target),
4737 GET_MODE_SIZE (GET_MODE (target)), 0);
4738 rtx blk_object = copy_rtx (object);
4739
4740 MEM_SET_IN_STRUCT_P (object, 1);
4741 MEM_SET_IN_STRUCT_P (blk_object, 1);
4742 PUT_MODE (blk_object, BLKmode);
4743
4744 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4745 emit_move_insn (object, target);
4746
4747 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4748 align, total_size, alias_set);
4749
4750 /* Even though we aren't returning target, we need to
4751 give it the updated value. */
4752 emit_move_insn (target, object);
4753
4754 return blk_object;
4755 }
4756
4757 /* If the structure is in a register or if the component
4758 is a bit field, we cannot use addressing to access it.
4759 Use bit-field techniques or SUBREG to store in it. */
4760
4761 if (mode == VOIDmode
4762 || (mode != BLKmode && ! direct_store[(int) mode]
4763 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4764 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4765 || GET_CODE (target) == REG
4766 || GET_CODE (target) == SUBREG
4767 /* If the field isn't aligned enough to store as an ordinary memref,
4768 store it as a bit field. */
4769 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS
4770 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4771 || bitpos % GET_MODE_ALIGNMENT (mode)))
4772 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS
4773 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4774 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4775 /* If the RHS and field are a constant size and the size of the
4776 RHS isn't the same size as the bitfield, we must use bitfield
4777 operations. */
4778 || ((bitsize >= 0
4779 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
4780 && (TREE_INT_CST_HIGH (TYPE_SIZE (TREE_TYPE (exp))) != 0
4781 || TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))) != bitsize)))
4782 {
4783 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4784
4785 /* If BITSIZE is narrower than the size of the type of EXP
4786 we will be narrowing TEMP. Normally, what's wanted are the
4787 low-order bits. However, if EXP's type is a record and this is a
4788 big-endian machine, we want the upper BITSIZE bits. */
4789 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4790 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4791 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4792 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4793 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4794 - bitsize),
4795 temp, 1);
4796
4797 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4798 MODE. */
4799 if (mode != VOIDmode && mode != BLKmode
4800 && mode != TYPE_MODE (TREE_TYPE (exp)))
4801 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4802
4803 /* If the modes of TARGET and TEMP are both BLKmode, both
4804 must be in memory and BITPOS must be aligned on a byte
4805 boundary. If so, we simply do a block copy. */
4806 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4807 {
4808 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4809 || bitpos % BITS_PER_UNIT != 0)
4810 abort ();
4811
4812 target = change_address (target, VOIDmode,
4813 plus_constant (XEXP (target, 0),
4814 bitpos / BITS_PER_UNIT));
4815
4816 /* Find an alignment that is consistent with the bit position. */
4817 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4818 align >>= 1;
4819
4820 emit_block_move (target, temp,
4821 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4822 / BITS_PER_UNIT),
4823 align);
4824
4825 return value_mode == VOIDmode ? const0_rtx : target;
4826 }
4827
4828 /* Store the value in the bitfield. */
4829 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4830 if (value_mode != VOIDmode)
4831 {
4832 /* The caller wants an rtx for the value. */
4833 /* If possible, avoid refetching from the bitfield itself. */
4834 if (width_mask != 0
4835 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4836 {
4837 tree count;
4838 enum machine_mode tmode;
4839
4840 if (unsignedp)
4841 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4842 tmode = GET_MODE (temp);
4843 if (tmode == VOIDmode)
4844 tmode = value_mode;
4845 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4846 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4847 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4848 }
4849 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4850 NULL_RTX, value_mode, 0, align,
4851 total_size);
4852 }
4853 return const0_rtx;
4854 }
4855 else
4856 {
4857 rtx addr = XEXP (target, 0);
4858 rtx to_rtx;
4859
4860 /* If a value is wanted, it must be the lhs;
4861 so make the address stable for multiple use. */
4862
4863 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4864 && ! CONSTANT_ADDRESS_P (addr)
4865 /* A frame-pointer reference is already stable. */
4866 && ! (GET_CODE (addr) == PLUS
4867 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4868 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4869 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4870 addr = copy_to_reg (addr);
4871
4872 /* Now build a reference to just the desired component. */
4873
4874 to_rtx = copy_rtx (change_address (target, mode,
4875 plus_constant (addr,
4876 (bitpos
4877 / BITS_PER_UNIT))));
4878 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4879 MEM_ALIAS_SET (to_rtx) = alias_set;
4880
4881 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4882 }
4883 }
4884 \f
4885 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4886 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4887 ARRAY_REFs and find the ultimate containing object, which we return.
4888
4889 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4890 bit position, and *PUNSIGNEDP to the signedness of the field.
4891 If the position of the field is variable, we store a tree
4892 giving the variable offset (in units) in *POFFSET.
4893 This offset is in addition to the bit position.
4894 If the position is not variable, we store 0 in *POFFSET.
4895 We set *PALIGNMENT to the alignment in bytes of the address that will be
4896 computed. This is the alignment of the thing we return if *POFFSET
4897 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4898
4899 If any of the extraction expressions is volatile,
4900 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4901
4902 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4903 is a mode that can be used to access the field. In that case, *PBITSIZE
4904 is redundant.
4905
4906 If the field describes a variable-sized object, *PMODE is set to
4907 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4908 this case, but the address of the object can be found. */
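/* A sketch of a typical call (the local variable names are only
   illustrative):

	int bitsize, bitpos, unsignedp, volatilep = 0, alignment;
	tree offset;
	enum machine_mode mode1;
	tree inner
	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, &alignment);

   after which INNER is the ultimate containing object and BITSIZE,
   BITPOS and OFFSET locate the referenced bits within it.  */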
4909
4910 tree
4911 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4912 punsignedp, pvolatilep, palignment)
4913 tree exp;
4914 int *pbitsize;
4915 int *pbitpos;
4916 tree *poffset;
4917 enum machine_mode *pmode;
4918 int *punsignedp;
4919 int *pvolatilep;
4920 int *palignment;
4921 {
4922 tree orig_exp = exp;
4923 tree size_tree = 0;
4924 enum machine_mode mode = VOIDmode;
4925 tree offset = integer_zero_node;
4926 unsigned int alignment = BIGGEST_ALIGNMENT;
4927
4928 if (TREE_CODE (exp) == COMPONENT_REF)
4929 {
4930 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4931 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4932 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4933 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4934 }
4935 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4936 {
4937 size_tree = TREE_OPERAND (exp, 1);
4938 *punsignedp = TREE_UNSIGNED (exp);
4939 }
4940 else
4941 {
4942 mode = TYPE_MODE (TREE_TYPE (exp));
4943 if (mode == BLKmode)
4944 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4945
4946 *pbitsize = GET_MODE_BITSIZE (mode);
4947 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4948 }
4949
4950 if (size_tree)
4951 {
4952 if (TREE_CODE (size_tree) != INTEGER_CST)
4953 mode = BLKmode, *pbitsize = -1;
4954 else
4955 *pbitsize = TREE_INT_CST_LOW (size_tree);
4956 }
4957
4958 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4959 and find the ultimate containing object. */
4960
4961 *pbitpos = 0;
4962
4963 while (1)
4964 {
4965 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4966 {
4967 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4968 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4969 : TREE_OPERAND (exp, 2));
4970 tree constant = integer_zero_node, var = pos;
4971
4972 /* If this field hasn't been filled in yet, don't go
4973 past it. This should only happen when folding expressions
4974 made during type construction. */
4975 if (pos == 0)
4976 break;
4977
4978 /* Assume here that the offset is a multiple of a unit.
4979 If not, there should be an explicitly added constant. */
4980 if (TREE_CODE (pos) == PLUS_EXPR
4981 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4982 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4983 else if (TREE_CODE (pos) == INTEGER_CST)
4984 constant = pos, var = integer_zero_node;
4985
4986 *pbitpos += TREE_INT_CST_LOW (constant);
4987 offset = size_binop (PLUS_EXPR, offset,
4988 size_binop (EXACT_DIV_EXPR, var,
4989 size_int (BITS_PER_UNIT)));
4990 }
4991
4992 else if (TREE_CODE (exp) == ARRAY_REF)
4993 {
4994 /* This code is based on the code in case ARRAY_REF in expand_expr
4995 below. We assume here that the size of an array element is
4996 always an integral multiple of BITS_PER_UNIT. */
4997
4998 tree index = TREE_OPERAND (exp, 1);
4999 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5000 tree low_bound
5001 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5002 tree index_type = TREE_TYPE (index);
5003 tree xindex;
5004
5005 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5006 {
5007 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5008 index);
5009 index_type = TREE_TYPE (index);
5010 }
5011
5012 /* Optimize the special-case of a zero lower bound.
5013
5014 We convert the low_bound to sizetype to avoid some problems
5015 with constant folding. (E.g. suppose the lower bound is 1,
5016 and its mode is QI. Without the conversion, (ARRAY
5017 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5018 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5019
5020 But sizetype isn't quite right either (especially if
5021 the lowbound is negative). FIXME */
5022
5023 if (! integer_zerop (low_bound))
5024 index = fold (build (MINUS_EXPR, index_type, index,
5025 convert (sizetype, low_bound)));
5026
5027 if (TREE_CODE (index) == INTEGER_CST)
5028 {
5029 index = convert (sbitsizetype, index);
5030 index_type = TREE_TYPE (index);
5031 }
5032
5033 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5034 convert (sbitsizetype,
5035 TYPE_SIZE (TREE_TYPE (exp)))));
5036
5037 if (TREE_CODE (xindex) == INTEGER_CST
5038 && TREE_INT_CST_HIGH (xindex) == 0)
5039 *pbitpos += TREE_INT_CST_LOW (xindex);
5040 else
5041 {
5042 /* Either the bit offset calculated above is not constant, or
5043 it overflowed. In either case, redo the multiplication
5044 against the size in units. This is especially important
5045 in the non-constant case to avoid a division at runtime. */
5046 xindex = fold (build (MULT_EXPR, ssizetype, index,
5047 convert (ssizetype,
5048 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5049
5050 if (contains_placeholder_p (xindex))
5051 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
5052
5053 offset = size_binop (PLUS_EXPR, offset, xindex);
5054 }
5055 }
5056 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5057 && ! ((TREE_CODE (exp) == NOP_EXPR
5058 || TREE_CODE (exp) == CONVERT_EXPR)
5059 && (TYPE_MODE (TREE_TYPE (exp))
5060 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5061 break;
5062
5063 /* If any reference in the chain is volatile, the effect is volatile. */
5064 if (TREE_THIS_VOLATILE (exp))
5065 *pvolatilep = 1;
5066
5067 /* If the offset is non-constant already, then we can't assume any
5068 alignment more than the alignment here. */
5069 if (! integer_zerop (offset))
5070 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5071
5072 exp = TREE_OPERAND (exp, 0);
5073 }
5074
5075 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5076 alignment = MIN (alignment, DECL_ALIGN (exp));
5077 else if (TREE_TYPE (exp) != 0)
5078 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5079
5080 if (integer_zerop (offset))
5081 offset = 0;
5082
5083 if (offset != 0 && contains_placeholder_p (offset))
5084 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5085
5086 *pmode = mode;
5087 *poffset = offset;
5088 *palignment = alignment / BITS_PER_UNIT;
5089 return exp;
5090 }
5091
5092 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5093 static enum memory_use_mode
5094 get_memory_usage_from_modifier (modifier)
5095 enum expand_modifier modifier;
5096 {
5097 switch (modifier)
5098 {
5099 case EXPAND_NORMAL:
5100 case EXPAND_SUM:
5101 return MEMORY_USE_RO;
5102 break;
5103 case EXPAND_MEMORY_USE_WO:
5104 return MEMORY_USE_WO;
5105 break;
5106 case EXPAND_MEMORY_USE_RW:
5107 return MEMORY_USE_RW;
5108 break;
5109 case EXPAND_MEMORY_USE_DONT:
5110 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5111 MEMORY_USE_DONT, because they are modifiers to a call of
5112 expand_expr in the ADDR_EXPR case of expand_expr. */
5113 case EXPAND_CONST_ADDRESS:
5114 case EXPAND_INITIALIZER:
5115 return MEMORY_USE_DONT;
5116 case EXPAND_MEMORY_USE_BAD:
5117 default:
5118 abort ();
5119 }
5120 }
5121 \f
5122 /* Given an rtx VALUE that may contain additions and multiplications,
5123 return an equivalent value that just refers to a register or memory.
5124 This is done by generating instructions to perform the arithmetic
5125 and returning a pseudo-register containing the value.
5126
5127 The returned value may be a REG, SUBREG, MEM or constant. */
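/* For example (a sketch only; the incoming rtx is hypothetical): given
   a VALUE of the form (plus (reg) (const_int 4)), the code below emits
   an add instruction and returns a pseudo register holding the sum:

	rtx op = force_operand (value, NULL_RTX);

   A VALUE that is already a register or memory reference is returned
   unchanged.  */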
5128
5129 rtx
5130 force_operand (value, target)
5131 rtx value, target;
5132 {
5133 register optab binoptab = 0;
5134 /* Use a temporary to force order of execution of calls to
5135 `force_operand'. */
5136 rtx tmp;
5137 register rtx op2;
5138 /* Use subtarget as the target for operand 0 of a binary operation. */
5139 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5140
5141 /* Check for a PIC address load. */
5142 if (flag_pic
5143 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5144 && XEXP (value, 0) == pic_offset_table_rtx
5145 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5146 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5147 || GET_CODE (XEXP (value, 1)) == CONST))
5148 {
5149 if (!subtarget)
5150 subtarget = gen_reg_rtx (GET_MODE (value));
5151 emit_move_insn (subtarget, value);
5152 return subtarget;
5153 }
5154
5155 if (GET_CODE (value) == PLUS)
5156 binoptab = add_optab;
5157 else if (GET_CODE (value) == MINUS)
5158 binoptab = sub_optab;
5159 else if (GET_CODE (value) == MULT)
5160 {
5161 op2 = XEXP (value, 1);
5162 if (!CONSTANT_P (op2)
5163 && !(GET_CODE (op2) == REG && op2 != subtarget))
5164 subtarget = 0;
5165 tmp = force_operand (XEXP (value, 0), subtarget);
5166 return expand_mult (GET_MODE (value), tmp,
5167 force_operand (op2, NULL_RTX),
5168 target, 0);
5169 }
5170
5171 if (binoptab)
5172 {
5173 op2 = XEXP (value, 1);
5174 if (!CONSTANT_P (op2)
5175 && !(GET_CODE (op2) == REG && op2 != subtarget))
5176 subtarget = 0;
5177 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5178 {
5179 binoptab = add_optab;
5180 op2 = negate_rtx (GET_MODE (value), op2);
5181 }
5182
5183 /* Check for an addition with OP2 a constant integer and our first
5184 operand a PLUS of a virtual register and something else. In that
5185 case, we want to emit the sum of the virtual register and the
5186 constant first and then add the other value. This allows virtual
5187 register instantiation to simply modify the constant rather than
5188 creating another one around this addition. */
5189 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5190 && GET_CODE (XEXP (value, 0)) == PLUS
5191 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5192 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5193 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5194 {
5195 rtx temp = expand_binop (GET_MODE (value), binoptab,
5196 XEXP (XEXP (value, 0), 0), op2,
5197 subtarget, 0, OPTAB_LIB_WIDEN);
5198 return expand_binop (GET_MODE (value), binoptab, temp,
5199 force_operand (XEXP (XEXP (value, 0), 1), 0),
5200 target, 0, OPTAB_LIB_WIDEN);
5201 }
5202
5203 tmp = force_operand (XEXP (value, 0), subtarget);
5204 return expand_binop (GET_MODE (value), binoptab, tmp,
5205 force_operand (op2, NULL_RTX),
5206 target, 0, OPTAB_LIB_WIDEN);
5207 /* We give UNSIGNEDP = 0 to expand_binop
5208 because the only operations we are expanding here are signed ones. */
5209 }
5210 return value;
5211 }
5212 \f
5213 /* Subroutine of expand_expr:
5214 save the non-copied parts (LIST) of an expr (LHS), and return a list
5215 which can restore these values to their previous values,
5216 should something modify their storage. */
5217
5218 static tree
5219 save_noncopied_parts (lhs, list)
5220 tree lhs;
5221 tree list;
5222 {
5223 tree tail;
5224 tree parts = 0;
5225
5226 for (tail = list; tail; tail = TREE_CHAIN (tail))
5227 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5228 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5229 else
5230 {
5231 tree part = TREE_VALUE (tail);
5232 tree part_type = TREE_TYPE (part);
5233 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5234 rtx target = assign_temp (part_type, 0, 1, 1);
5235 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5236 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5237 parts = tree_cons (to_be_saved,
5238 build (RTL_EXPR, part_type, NULL_TREE,
5239 (tree) target),
5240 parts);
5241 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5242 }
5243 return parts;
5244 }
5245
5246 /* Subroutine of expand_expr:
5247 record the non-copied parts (LIST) of an expr (LHS), and return a list
5248 which specifies the initial values of these parts. */
5249
5250 static tree
5251 init_noncopied_parts (lhs, list)
5252 tree lhs;
5253 tree list;
5254 {
5255 tree tail;
5256 tree parts = 0;
5257
5258 for (tail = list; tail; tail = TREE_CHAIN (tail))
5259 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5260 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5261 else if (TREE_PURPOSE (tail))
5262 {
5263 tree part = TREE_VALUE (tail);
5264 tree part_type = TREE_TYPE (part);
5265 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5266 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5267 }
5268 return parts;
5269 }
5270
5271 /* Subroutine of expand_expr: return nonzero iff there is no way that
5272 EXP can reference X, which is being modified. TOP_P is nonzero if this
5273 call is going to be used to determine whether we need a temporary
5274 for EXP, as opposed to a recursive call to this function.
5275
5276 It is always safe for this routine to return zero since it merely
5277 searches for optimization opportunities. */
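/* For instance, when X is a MEM and EXP is an INDIRECT_REF, the code
   below answers 0, since a store through the pointer might overlap X;
   when EXP is a constant, it answers 1, since a constant cannot refer
   to X at all.  */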
5278
5279 static int
5280 safe_from_p (x, exp, top_p)
5281 rtx x;
5282 tree exp;
5283 int top_p;
5284 {
5285 rtx exp_rtl = 0;
5286 int i, nops;
5287 static int save_expr_count;
5288 static int save_expr_size = 0;
5289 static tree *save_expr_rewritten;
5290 static tree save_expr_trees[256];
5291
5292 if (x == 0
5293 /* If EXP has varying size, we MUST use a target since we currently
5294 have no way of allocating temporaries of variable size
5295 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5296 So we assume here that something at a higher level has prevented a
5297 clash. This is somewhat bogus, but the best we can do. Only
5298 do this when X is BLKmode and when we are at the top level. */
5299 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5300 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5301 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5302 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5303 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5304 != INTEGER_CST)
5305 && GET_MODE (x) == BLKmode))
5306 return 1;
5307
5308 if (top_p && save_expr_size == 0)
5309 {
5310 int rtn;
5311
5312 save_expr_count = 0;
5313 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5314 save_expr_rewritten = &save_expr_trees[0];
5315
5316 rtn = safe_from_p (x, exp, 1);
5317
5318 for (i = 0; i < save_expr_count; ++i)
5319 {
5320 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5321 abort ();
5322 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5323 }
5324
5325 save_expr_size = 0;
5326
5327 return rtn;
5328 }
5329
5330 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5331 find the underlying pseudo. */
5332 if (GET_CODE (x) == SUBREG)
5333 {
5334 x = SUBREG_REG (x);
5335 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5336 return 0;
5337 }
5338
5339 /* If X is a location in the outgoing argument area, it is always safe. */
5340 if (GET_CODE (x) == MEM
5341 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5342 || (GET_CODE (XEXP (x, 0)) == PLUS
5343 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5344 return 1;
5345
5346 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5347 {
5348 case 'd':
5349 exp_rtl = DECL_RTL (exp);
5350 break;
5351
5352 case 'c':
5353 return 1;
5354
5355 case 'x':
5356 if (TREE_CODE (exp) == TREE_LIST)
5357 return ((TREE_VALUE (exp) == 0
5358 || safe_from_p (x, TREE_VALUE (exp), 0))
5359 && (TREE_CHAIN (exp) == 0
5360 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5361 else if (TREE_CODE (exp) == ERROR_MARK)
5362 return 1; /* An already-visited SAVE_EXPR? */
5363 else
5364 return 0;
5365
5366 case '1':
5367 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5368
5369 case '2':
5370 case '<':
5371 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5372 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5373
5374 case 'e':
5375 case 'r':
5376 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5377 the expression. If it is set, we conflict iff we are that rtx or
5378 both are in memory. Otherwise, we check all operands of the
5379 expression recursively. */
5380
5381 switch (TREE_CODE (exp))
5382 {
5383 case ADDR_EXPR:
5384 return (staticp (TREE_OPERAND (exp, 0))
5385 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5386 || TREE_STATIC (exp));
5387
5388 case INDIRECT_REF:
5389 if (GET_CODE (x) == MEM)
5390 return 0;
5391 break;
5392
5393 case CALL_EXPR:
5394 exp_rtl = CALL_EXPR_RTL (exp);
5395 if (exp_rtl == 0)
5396 {
5397 /* Assume that the call will clobber all hard registers and
5398 all of memory. */
5399 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5400 || GET_CODE (x) == MEM)
5401 return 0;
5402 }
5403
5404 break;
5405
5406 case RTL_EXPR:
5407 /* If a sequence exists, we would have to scan every instruction
5408 in the sequence to see if it was safe. This is probably not
5409 worthwhile. */
5410 if (RTL_EXPR_SEQUENCE (exp))
5411 return 0;
5412
5413 exp_rtl = RTL_EXPR_RTL (exp);
5414 break;
5415
5416 case WITH_CLEANUP_EXPR:
5417 exp_rtl = RTL_EXPR_RTL (exp);
5418 break;
5419
5420 case CLEANUP_POINT_EXPR:
5421 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5422
5423 case SAVE_EXPR:
5424 exp_rtl = SAVE_EXPR_RTL (exp);
5425 if (exp_rtl)
5426 break;
5427
5428 /* This SAVE_EXPR might appear many times in the top-level
5429 safe_from_p() expression, and if it has a complex
5430 subexpression, examining it multiple times could result
5431 in a combinatorial explosion. E.g. on an Alpha
5432 running at least 200MHz, a Fortran test case compiled with
5433 optimization took about 28 minutes to compile -- even though
5434 it was only a few lines long, and the complicated line causing
5435 so much time to be spent in the earlier version of safe_from_p()
5436 had only 293 or so unique nodes.
5437
5438 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5439 where it is so we can turn it back in the top-level safe_from_p()
5440 when we're done. */
5441
5442 /* For now, don't bother re-sizing the array. */
5443 if (save_expr_count >= save_expr_size)
5444 return 0;
5445 save_expr_rewritten[save_expr_count++] = exp;
5446
5447 nops = tree_code_length[(int) SAVE_EXPR];
5448 for (i = 0; i < nops; i++)
5449 {
5450 tree operand = TREE_OPERAND (exp, i);
5451 if (operand == NULL_TREE)
5452 continue;
5453 TREE_SET_CODE (exp, ERROR_MARK);
5454 if (!safe_from_p (x, operand, 0))
5455 return 0;
5456 TREE_SET_CODE (exp, SAVE_EXPR);
5457 }
5458 TREE_SET_CODE (exp, ERROR_MARK);
5459 return 1;
5460
5461 case BIND_EXPR:
5462 /* The only operand we look at is operand 1. The rest aren't
5463 part of the expression. */
5464 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5465
5466 case METHOD_CALL_EXPR:
5467 /* This takes a rtx argument, but shouldn't appear here. */
5468 abort ();
5469
5470 default:
5471 break;
5472 }
5473
5474 /* If we have an rtx, we do not need to scan our operands. */
5475 if (exp_rtl)
5476 break;
5477
5478 nops = tree_code_length[(int) TREE_CODE (exp)];
5479 for (i = 0; i < nops; i++)
5480 if (TREE_OPERAND (exp, i) != 0
5481 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5482 return 0;
5483 }
5484
5485 /* If we have an rtl, find any enclosed object. Then see if we conflict
5486 with it. */
5487 if (exp_rtl)
5488 {
5489 if (GET_CODE (exp_rtl) == SUBREG)
5490 {
5491 exp_rtl = SUBREG_REG (exp_rtl);
5492 if (GET_CODE (exp_rtl) == REG
5493 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5494 return 0;
5495 }
5496
5497 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5498 are memory and EXP is not readonly. */
5499 return ! (rtx_equal_p (x, exp_rtl)
5500 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5501 && ! TREE_READONLY (exp)));
5502 }
5503
5504 /* If we reach here, it is safe. */
5505 return 1;
5506 }
5507
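/* A minimal sketch, outside the compiler proper, of the marking trick
   used for SAVE_EXPRs above: when a shared subtree is reachable along
   many paths, temporarily changing its tag lets the recursive scan
   examine it only once instead of combinatorially many times; the
   top-level call then restores the tags, just as safe_from_p turns
   ERROR_MARK back into SAVE_EXPR.  The node type and names here are
   hypothetical.  */
#if 0
struct shared_node { int visited; struct shared_node *kid[2]; };

static int
example_scan (n, forbidden)
     struct shared_node *n, *forbidden;
{
  int i;
  if (n == 0 || n->visited)
    return 1;			/* Null, or already seen via another path.  */
  if (n == forbidden)
    return 0;			/* Found a reference to the object in question.  */
  n->visited = 1;		/* Analogous to TREE_SET_CODE (exp, ERROR_MARK).  */
  for (i = 0; i < 2; i++)
    if (! example_scan (n->kid[i], forbidden))
      return 0;
  return 1;
}
#endif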
5508 /* Subroutine of expand_expr: return nonzero iff EXP is an
5509 expression whose type is statically determinable. */
5510
5511 static int
5512 fixed_type_p (exp)
5513 tree exp;
5514 {
5515 if (TREE_CODE (exp) == PARM_DECL
5516 || TREE_CODE (exp) == VAR_DECL
5517 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5518 || TREE_CODE (exp) == COMPONENT_REF
5519 || TREE_CODE (exp) == ARRAY_REF)
5520 return 1;
5521 return 0;
5522 }
5523
5524 /* Subroutine of expand_expr: return rtx if EXP is a
5525 variable or parameter; else return 0. */
5526
5527 static rtx
5528 var_rtx (exp)
5529 tree exp;
5530 {
5531 STRIP_NOPS (exp);
5532 switch (TREE_CODE (exp))
5533 {
5534 case PARM_DECL:
5535 case VAR_DECL:
5536 return DECL_RTL (exp);
5537 default:
5538 return 0;
5539 }
5540 }
5541
5542 #ifdef MAX_INTEGER_COMPUTATION_MODE
5543 void
5544 check_max_integer_computation_mode (exp)
5545 tree exp;
5546 {
5547 enum tree_code code;
5548 enum machine_mode mode;
5549
5550 /* Strip any NOPs that don't change the mode. */
5551 STRIP_NOPS (exp);
5552 code = TREE_CODE (exp);
5553
5554 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5555 if (code == NOP_EXPR
5556 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5557 return;
5558
5559 /* First check the type of the overall operation. We need only look at
5560 unary, binary and relational operations. */
5561 if (TREE_CODE_CLASS (code) == '1'
5562 || TREE_CODE_CLASS (code) == '2'
5563 || TREE_CODE_CLASS (code) == '<')
5564 {
5565 mode = TYPE_MODE (TREE_TYPE (exp));
5566 if (GET_MODE_CLASS (mode) == MODE_INT
5567 && mode > MAX_INTEGER_COMPUTATION_MODE)
5568 fatal ("unsupported wide integer operation");
5569 }
5570
5571 /* Check operand of a unary op. */
5572 if (TREE_CODE_CLASS (code) == '1')
5573 {
5574 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5575 if (GET_MODE_CLASS (mode) == MODE_INT
5576 && mode > MAX_INTEGER_COMPUTATION_MODE)
5577 fatal ("unsupported wide integer operation");
5578 }
5579
5580 /* Check operands of a binary/comparison op. */
5581 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5582 {
5583 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5584 if (GET_MODE_CLASS (mode) == MODE_INT
5585 && mode > MAX_INTEGER_COMPUTATION_MODE)
5586 fatal ("unsupported wide integer operation");
5587
5588 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5589 if (GET_MODE_CLASS (mode) == MODE_INT
5590 && mode > MAX_INTEGER_COMPUTATION_MODE)
5591 fatal ("unsupported wide integer operation");
5592 }
5593 }
5594 #endif
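/* Illustrative only: a target that cannot do integer arithmetic wider
   than SImode might say, in its target description header (this is a
   hypothetical example, not taken from any config/ file):

	#define MAX_INTEGER_COMPUTATION_MODE SImode

   With such a definition, the checks above reject any unary, binary or
   relational tree operation whose result or operands are integers in a
   mode wider than SImode.  */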
5595
5596 \f
5597 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5598 has any readonly fields. If any of the fields have types that
5599 contain readonly fields, return true as well. */
5600
5601 static int
5602 readonly_fields_p (type)
5603 tree type;
5604 {
5605 tree field;
5606
5607 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5608 if (TREE_CODE (field) == FIELD_DECL
5609 && (TREE_READONLY (field)
5610 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5611 && readonly_fields_p (TREE_TYPE (field)))))
5612 return 1;
5613
5614 return 0;
5615 }
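/* For example (illustrative declarations only), given

	struct inner { const int x; };
	struct outer { struct inner i; int y; };

   readonly_fields_p returns nonzero for both record types: for `inner'
   because of its const member, and for `outer' because one of its fields
   has a type that itself contains a readonly field.  */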
5616 \f
5617 /* expand_expr: generate code for computing expression EXP.
5618 An rtx for the computed value is returned. The value is never null.
5619 In the case of a void EXP, const0_rtx is returned.
5620
5621 The value may be stored in TARGET if TARGET is nonzero.
5622 TARGET is just a suggestion; callers must assume that
5623 the rtx returned may not be the same as TARGET.
5624
5625 If TARGET is CONST0_RTX, it means that the value will be ignored.
5626
5627 If TMODE is not VOIDmode, it suggests generating the
5628 result in mode TMODE. But this is done only when convenient.
5629 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5630 TMODE is just a suggestion; callers must assume that
5631 the rtx returned may not have mode TMODE.
5632
5633 Note that TARGET may have neither TMODE nor MODE. In that case, it
5634 probably will not be used.
5635
5636 If MODIFIER is EXPAND_SUM then when EXP is an addition
5637 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5638 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5639 products as above, or REG or MEM, or constant.
5640 Ordinarily in such cases we would output mul or add instructions
5641 and then return a pseudo reg containing the sum.
5642
5643 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5644 it also marks a label as absolutely required (it can't be dead).
5645 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5646 This is used for outputting expressions used in initializers.
5647
5648 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5649 with a constant address even if that address is not normally legitimate.
5650 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
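/* A minimal sketch, not compiler code, of a typical call: let expand_expr
   pick the natural mode and its own target, then copy the result if a
   stable register value is needed.  The function name here is
   hypothetical.  */
#if 0
static rtx
example_expand_use (exp)
     tree exp;
{
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  /* The returned rtx need not be a fresh pseudo and may be a MEM, so
     force it into a register before modifying or reusing it.  */
  if (GET_CODE (val) != REG)
    val = copy_to_reg (val);
  return val;
}
#endif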
5651
5652 rtx
5653 expand_expr (exp, target, tmode, modifier)
5654 register tree exp;
5655 rtx target;
5656 enum machine_mode tmode;
5657 enum expand_modifier modifier;
5658 {
5659 register rtx op0, op1, temp;
5660 tree type = TREE_TYPE (exp);
5661 int unsignedp = TREE_UNSIGNED (type);
5662 register enum machine_mode mode;
5663 register enum tree_code code = TREE_CODE (exp);
5664 optab this_optab;
5665 rtx subtarget, original_target;
5666 int ignore;
5667 tree context;
5668 /* Used by check-memory-usage to make modifier read only. */
5669 enum expand_modifier ro_modifier;
5670
5671 /* Handle ERROR_MARK before anybody tries to access its type. */
5672 if (TREE_CODE (exp) == ERROR_MARK)
5673 {
5674 op0 = CONST0_RTX (tmode);
5675 if (op0 != 0)
5676 return op0;
5677 return const0_rtx;
5678 }
5679
5680 mode = TYPE_MODE (type);
5681 /* Use subtarget as the target for operand 0 of a binary operation. */
5682 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5683 original_target = target;
5684 ignore = (target == const0_rtx
5685 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5686 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5687 || code == COND_EXPR)
5688 && TREE_CODE (type) == VOID_TYPE));
5689
5690 /* Make a read-only version of the modifier. */
5691 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5692 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5693 ro_modifier = modifier;
5694 else
5695 ro_modifier = EXPAND_NORMAL;
5696
5697 /* Don't use hard regs as subtargets, because the combiner
5698 can only handle pseudo regs. */
5699 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5700 subtarget = 0;
5701 /* Avoid subtargets inside loops,
5702 since they hide some invariant expressions. */
5703 if (preserve_subexpressions_p ())
5704 subtarget = 0;
5705
5706 /* If we are going to ignore this result, we need only do something
5707 if there is a side-effect somewhere in the expression. If there
5708 is, short-circuit the most common cases here. Note that we must
5709 not call expand_expr with anything but const0_rtx in case this
5710 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5711
5712 if (ignore)
5713 {
5714 if (! TREE_SIDE_EFFECTS (exp))
5715 return const0_rtx;
5716
5717 /* Ensure we reference a volatile object even if value is ignored, but
5718 don't do this if all we are doing is taking its address. */
5719 if (TREE_THIS_VOLATILE (exp)
5720 && TREE_CODE (exp) != FUNCTION_DECL
5721 && mode != VOIDmode && mode != BLKmode
5722 && modifier != EXPAND_CONST_ADDRESS)
5723 {
5724 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5725 if (GET_CODE (temp) == MEM)
5726 temp = copy_to_reg (temp);
5727 return const0_rtx;
5728 }
5729
5730 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5731 || code == INDIRECT_REF || code == BUFFER_REF)
5732 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5733 VOIDmode, ro_modifier);
5734 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5735 || code == ARRAY_REF)
5736 {
5737 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5738 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5739 return const0_rtx;
5740 }
5741 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5742 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5743 /* If the second operand has no side effects, just evaluate
5744 the first. */
5745 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5746 VOIDmode, ro_modifier);
5747 else if (code == BIT_FIELD_REF)
5748 {
5749 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5750 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5751 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5752 return const0_rtx;
5753 }
5754 ;
5755 target = 0;
5756 }
5757
5758 #ifdef MAX_INTEGER_COMPUTATION_MODE
5759 /* Only check stuff here if the mode we want is different from the mode
5760 of the expression; if it's the same, check_max_integer_computiation_mode
5761 will handle it. Do we really need to check this stuff at all? */
5762
5763 if (target
5764 && GET_MODE (target) != mode
5765 && TREE_CODE (exp) != INTEGER_CST
5766 && TREE_CODE (exp) != PARM_DECL
5767 && TREE_CODE (exp) != ARRAY_REF
5768 && TREE_CODE (exp) != COMPONENT_REF
5769 && TREE_CODE (exp) != BIT_FIELD_REF
5770 && TREE_CODE (exp) != INDIRECT_REF
5771 && TREE_CODE (exp) != CALL_EXPR
5772 && TREE_CODE (exp) != VAR_DECL
5773 && TREE_CODE (exp) != RTL_EXPR)
5774 {
5775 enum machine_mode mode = GET_MODE (target);
5776
5777 if (GET_MODE_CLASS (mode) == MODE_INT
5778 && mode > MAX_INTEGER_COMPUTATION_MODE)
5779 fatal ("unsupported wide integer operation");
5780 }
5781
5782 if (tmode != mode
5783 && TREE_CODE (exp) != INTEGER_CST
5784 && TREE_CODE (exp) != PARM_DECL
5785 && TREE_CODE (exp) != ARRAY_REF
5786 && TREE_CODE (exp) != COMPONENT_REF
5787 && TREE_CODE (exp) != BIT_FIELD_REF
5788 && TREE_CODE (exp) != INDIRECT_REF
5789 && TREE_CODE (exp) != VAR_DECL
5790 && TREE_CODE (exp) != CALL_EXPR
5791 && TREE_CODE (exp) != RTL_EXPR
5792 && GET_MODE_CLASS (tmode) == MODE_INT
5793 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5794 fatal ("unsupported wide integer operation");
5795
5796 check_max_integer_computation_mode (exp);
5797 #endif
5798
5799 /* If we will do cse, generate all results into pseudo registers
5800 since 1) that allows cse to find more things
5801 and 2) otherwise cse could produce an insn the machine
5802 cannot support. */
5803
5804 if (! cse_not_expected && mode != BLKmode && target
5805 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5806 target = subtarget;
5807
5808 switch (code)
5809 {
5810 case LABEL_DECL:
5811 {
5812 tree function = decl_function_context (exp);
5813 /* Handle using a label in a containing function. */
5814 if (function != current_function_decl
5815 && function != inline_function_decl && function != 0)
5816 {
5817 struct function *p = find_function_data (function);
5818 /* Allocate in the memory associated with the function
5819 that the label is in. */
5820 push_obstacks (p->function_obstack,
5821 p->function_maybepermanent_obstack);
5822
5823 p->expr->x_forced_labels
5824 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5825 p->expr->x_forced_labels);
5826 pop_obstacks ();
5827 }
5828 else
5829 {
5830 if (modifier == EXPAND_INITIALIZER)
5831 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5832 label_rtx (exp),
5833 forced_labels);
5834 }
5835
5836 temp = gen_rtx_MEM (FUNCTION_MODE,
5837 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5838 if (function != current_function_decl
5839 && function != inline_function_decl && function != 0)
5840 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5841 return temp;
5842 }
5843
5844 case PARM_DECL:
5845 if (DECL_RTL (exp) == 0)
5846 {
5847 error_with_decl (exp, "prior parameter's size depends on `%s'");
5848 return CONST0_RTX (mode);
5849 }
5850
5851 /* ... fall through ... */
5852
5853 case VAR_DECL:
5854 /* If a static var's type was incomplete when the decl was written,
5855 but the type is complete now, lay out the decl now. */
5856 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5857 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5858 {
5859 push_obstacks_nochange ();
5860 end_temporary_allocation ();
5861 layout_decl (exp, 0);
5862 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5863 pop_obstacks ();
5864 }
5865
5866 /* Although static-storage variables start off initialized, according to
5867 ANSI C, a memcpy could overwrite them with uninitialized values. So
5868 we check them too. This also lets us check for read-only variables
5869 accessed via a non-const declaration, in case it won't be detected
5870 any other way (e.g., in an embedded system or OS kernel without
5871 memory protection).
5872
5873 Aggregates are not checked here; they're handled elsewhere. */
5874 if (cfun && current_function_check_memory_usage
5875 && code == VAR_DECL
5876 && GET_CODE (DECL_RTL (exp)) == MEM
5877 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5878 {
5879 enum memory_use_mode memory_usage;
5880 memory_usage = get_memory_usage_from_modifier (modifier);
5881
5882 if (memory_usage != MEMORY_USE_DONT)
5883 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5884 XEXP (DECL_RTL (exp), 0), Pmode,
5885 GEN_INT (int_size_in_bytes (type)),
5886 TYPE_MODE (sizetype),
5887 GEN_INT (memory_usage),
5888 TYPE_MODE (integer_type_node));
5889 }
5890
5891 /* ... fall through ... */
5892
5893 case FUNCTION_DECL:
5894 case RESULT_DECL:
5895 if (DECL_RTL (exp) == 0)
5896 abort ();
5897
5898 /* Ensure the variable is marked as used even if it doesn't go through
5899 a parser. If it hasn't been used yet, write out an external
5900 definition. */
5901 if (! TREE_USED (exp))
5902 {
5903 assemble_external (exp);
5904 TREE_USED (exp) = 1;
5905 }
5906
5907 /* Show we haven't gotten RTL for this yet. */
5908 temp = 0;
5909
5910 /* Handle variables inherited from containing functions. */
5911 context = decl_function_context (exp);
5912
5913 /* We treat inline_function_decl as an alias for the current function
5914 because that is the inline function whose vars, types, etc.
5915 are being merged into the current function.
5916 See expand_inline_function. */
5917
5918 if (context != 0 && context != current_function_decl
5919 && context != inline_function_decl
5920 /* If var is static, we don't need a static chain to access it. */
5921 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5922 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5923 {
5924 rtx addr;
5925
5926 /* Mark as non-local and addressable. */
5927 DECL_NONLOCAL (exp) = 1;
5928 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5929 abort ();
5930 mark_addressable (exp);
5931 if (GET_CODE (DECL_RTL (exp)) != MEM)
5932 abort ();
5933 addr = XEXP (DECL_RTL (exp), 0);
5934 if (GET_CODE (addr) == MEM)
5935 addr = gen_rtx_MEM (Pmode,
5936 fix_lexical_addr (XEXP (addr, 0), exp));
5937 else
5938 addr = fix_lexical_addr (addr, exp);
5939 temp = change_address (DECL_RTL (exp), mode, addr);
5940 }
5941
5942 /* This is the case of an array whose size is to be determined
5943 from its initializer, while the initializer is still being parsed.
5944 See expand_decl. */
5945
5946 else if (GET_CODE (DECL_RTL (exp)) == MEM
5947 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5948 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5949 XEXP (DECL_RTL (exp), 0));
5950
5951 /* If DECL_RTL is memory, we are in the normal case and either
5952 the address is not valid or it is not a register and -fforce-addr
5953 is specified, get the address into a register. */
5954
5955 else if (GET_CODE (DECL_RTL (exp)) == MEM
5956 && modifier != EXPAND_CONST_ADDRESS
5957 && modifier != EXPAND_SUM
5958 && modifier != EXPAND_INITIALIZER
5959 && (! memory_address_p (DECL_MODE (exp),
5960 XEXP (DECL_RTL (exp), 0))
5961 || (flag_force_addr
5962 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5963 temp = change_address (DECL_RTL (exp), VOIDmode,
5964 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5965
5966 /* If we got something, return it. But first, set the alignment
5967 if the address is a register. */
5968 if (temp != 0)
5969 {
5970 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5971 mark_reg_pointer (XEXP (temp, 0),
5972 DECL_ALIGN (exp) / BITS_PER_UNIT);
5973
5974 return temp;
5975 }
5976
5977 /* If the mode of DECL_RTL does not match that of the decl, it
5978 must be a promoted value. We return a SUBREG of the wanted mode,
5979 but mark it so that we know that it was already extended. */
5980
5981 if (GET_CODE (DECL_RTL (exp)) == REG
5982 && GET_MODE (DECL_RTL (exp)) != mode)
5983 {
5984 /* Get the signedness used for this variable. Ensure we get the
5985 same mode we got when the variable was declared. */
5986 if (GET_MODE (DECL_RTL (exp))
5987 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5988 abort ();
5989
5990 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5991 SUBREG_PROMOTED_VAR_P (temp) = 1;
5992 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5993 return temp;
5994 }
5995
5996 return DECL_RTL (exp);
5997
5998 case INTEGER_CST:
5999 return immed_double_const (TREE_INT_CST_LOW (exp),
6000 TREE_INT_CST_HIGH (exp),
6001 mode);
6002
6003 case CONST_DECL:
6004 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6005 EXPAND_MEMORY_USE_BAD);
6006
6007 case REAL_CST:
6008 /* If optimized, generate immediate CONST_DOUBLE
6009 which will be turned into memory by reload if necessary.
6010
6011 We used to force a register so that loop.c could see it. But
6012 this does not allow gen_* patterns to perform optimizations with
6013 the constants. It also produces two insns in cases like "x = 1.0;".
6014 On most machines, floating-point constants are not permitted in
6015 many insns, so we'd end up copying it to a register in any case.
6016
6017 Now, we do the copying in expand_binop, if appropriate. */
6018 return immed_real_const (exp);
6019
6020 case COMPLEX_CST:
6021 case STRING_CST:
6022 if (! TREE_CST_RTL (exp))
6023 output_constant_def (exp);
6024
6025 /* TREE_CST_RTL probably contains a constant address.
6026 On RISC machines where a constant address isn't valid,
6027 make some insns to get that address into a register. */
6028 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6029 && modifier != EXPAND_CONST_ADDRESS
6030 && modifier != EXPAND_INITIALIZER
6031 && modifier != EXPAND_SUM
6032 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6033 || (flag_force_addr
6034 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6035 return change_address (TREE_CST_RTL (exp), VOIDmode,
6036 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6037 return TREE_CST_RTL (exp);
6038
6039 case EXPR_WITH_FILE_LOCATION:
6040 {
6041 rtx to_return;
6042 char *saved_input_filename = input_filename;
6043 int saved_lineno = lineno;
6044 input_filename = EXPR_WFL_FILENAME (exp);
6045 lineno = EXPR_WFL_LINENO (exp);
6046 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6047 emit_line_note (input_filename, lineno);
6048 /* Possibly avoid switching back and forth here. */
6049 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6050 input_filename = saved_input_filename;
6051 lineno = saved_lineno;
6052 return to_return;
6053 }
6054
6055 case SAVE_EXPR:
6056 context = decl_function_context (exp);
6057
6058 /* If this SAVE_EXPR was at global context, assume we are an
6059 initialization function and move it into our context. */
6060 if (context == 0)
6061 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6062
6063 /* We treat inline_function_decl as an alias for the current function
6064 because that is the inline function whose vars, types, etc.
6065 are being merged into the current function.
6066 See expand_inline_function. */
6067 if (context == current_function_decl || context == inline_function_decl)
6068 context = 0;
6069
6070 /* If this is non-local, handle it. */
6071 if (context)
6072 {
6073 /* The following call just exists to abort if the context is
6074 not of a containing function. */
6075 find_function_data (context);
6076
6077 temp = SAVE_EXPR_RTL (exp);
6078 if (temp && GET_CODE (temp) == REG)
6079 {
6080 put_var_into_stack (exp);
6081 temp = SAVE_EXPR_RTL (exp);
6082 }
6083 if (temp == 0 || GET_CODE (temp) != MEM)
6084 abort ();
6085 return change_address (temp, mode,
6086 fix_lexical_addr (XEXP (temp, 0), exp));
6087 }
6088 if (SAVE_EXPR_RTL (exp) == 0)
6089 {
6090 if (mode == VOIDmode)
6091 temp = const0_rtx;
6092 else
6093 temp = assign_temp (type, 3, 0, 0);
6094
6095 SAVE_EXPR_RTL (exp) = temp;
6096 if (!optimize && GET_CODE (temp) == REG)
6097 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6098 save_expr_regs);
6099
6100 /* If the mode of TEMP does not match that of the expression, it
6101 must be a promoted value. We pass store_expr a SUBREG of the
6102 wanted mode but mark it so that we know that it was already
6103 extended. Note that `unsignedp' was modified above in
6104 this case. */
6105
6106 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6107 {
6108 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6109 SUBREG_PROMOTED_VAR_P (temp) = 1;
6110 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6111 }
6112
6113 if (temp == const0_rtx)
6114 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6115 EXPAND_MEMORY_USE_BAD);
6116 else
6117 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6118
6119 TREE_USED (exp) = 1;
6120 }
6121
6122 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6123 must be a promoted value. We return a SUBREG of the wanted mode,
6124 but mark it so that we know that it was already extended. */
6125
6126 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6127 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6128 {
6129 /* Compute the signedness and make the proper SUBREG. */
6130 promote_mode (type, mode, &unsignedp, 0);
6131 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6132 SUBREG_PROMOTED_VAR_P (temp) = 1;
6133 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6134 return temp;
6135 }
6136
6137 return SAVE_EXPR_RTL (exp);
6138
6139 case UNSAVE_EXPR:
6140 {
6141 rtx temp;
6142 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6143 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6144 return temp;
6145 }
6146
6147 case PLACEHOLDER_EXPR:
6148 {
6149 tree placeholder_expr;
6150
6151 /* If there is an object on the head of the placeholder list,
6152 see if some object in it is of type TYPE or a pointer to it. For
6153 further information, see tree.def. */
6154 for (placeholder_expr = placeholder_list;
6155 placeholder_expr != 0;
6156 placeholder_expr = TREE_CHAIN (placeholder_expr))
6157 {
6158 tree need_type = TYPE_MAIN_VARIANT (type);
6159 tree object = 0;
6160 tree old_list = placeholder_list;
6161 tree elt;
6162
6163 /* Find the outermost reference that is of the type we want.
6164 If none, see if any object has a type that is a pointer to
6165 the type we want. */
6166 for (elt = TREE_PURPOSE (placeholder_expr);
6167 elt != 0 && object == 0;
6168 elt
6169 = ((TREE_CODE (elt) == COMPOUND_EXPR
6170 || TREE_CODE (elt) == COND_EXPR)
6171 ? TREE_OPERAND (elt, 1)
6172 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6173 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6174 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6175 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6176 ? TREE_OPERAND (elt, 0) : 0))
6177 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6178 object = elt;
6179
6180 for (elt = TREE_PURPOSE (placeholder_expr);
6181 elt != 0 && object == 0;
6182 elt
6183 = ((TREE_CODE (elt) == COMPOUND_EXPR
6184 || TREE_CODE (elt) == COND_EXPR)
6185 ? TREE_OPERAND (elt, 1)
6186 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6187 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6188 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6189 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6190 ? TREE_OPERAND (elt, 0) : 0))
6191 if (POINTER_TYPE_P (TREE_TYPE (elt))
6192 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6193 == need_type))
6194 object = build1 (INDIRECT_REF, need_type, elt);
6195
6196 if (object != 0)
6197 {
6198 /* Expand this object skipping the list entries before
6199 it was found in case it is also a PLACEHOLDER_EXPR.
6200 In that case, we want to translate it using subsequent
6201 entries. */
6202 placeholder_list = TREE_CHAIN (placeholder_expr);
6203 temp = expand_expr (object, original_target, tmode,
6204 ro_modifier);
6205 placeholder_list = old_list;
6206 return temp;
6207 }
6208 }
6209 }
6210
6211 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6212 abort ();
6213
6214 case WITH_RECORD_EXPR:
6215 /* Put the object on the placeholder list, expand our first operand,
6216 and pop the list. */
6217 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6218 placeholder_list);
6219 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6220 tmode, ro_modifier);
6221 placeholder_list = TREE_CHAIN (placeholder_list);
6222 return target;
6223
6224 case GOTO_EXPR:
6225 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6226 expand_goto (TREE_OPERAND (exp, 0));
6227 else
6228 expand_computed_goto (TREE_OPERAND (exp, 0));
6229 return const0_rtx;
6230
6231 case EXIT_EXPR:
6232 expand_exit_loop_if_false (NULL_PTR,
6233 invert_truthvalue (TREE_OPERAND (exp, 0)));
6234 return const0_rtx;
6235
6236 case LABELED_BLOCK_EXPR:
6237 if (LABELED_BLOCK_BODY (exp))
6238 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6239 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6240 return const0_rtx;
6241
6242 case EXIT_BLOCK_EXPR:
6243 if (EXIT_BLOCK_RETURN (exp))
6244 sorry ("returned value in block_exit_expr");
6245 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6246 return const0_rtx;
6247
6248 case LOOP_EXPR:
6249 push_temp_slots ();
6250 expand_start_loop (1);
6251 expand_expr_stmt (TREE_OPERAND (exp, 0));
6252 expand_end_loop ();
6253 pop_temp_slots ();
6254
6255 return const0_rtx;
6256
6257 case BIND_EXPR:
6258 {
6259 tree vars = TREE_OPERAND (exp, 0);
6260 int vars_need_expansion = 0;
6261
6262 /* Need to open a binding contour here because
6263 if there are any cleanups they must be contained here. */
6264 expand_start_bindings (2);
6265
6266 /* Mark the corresponding BLOCK for output in its proper place. */
6267 if (TREE_OPERAND (exp, 2) != 0
6268 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6269 insert_block (TREE_OPERAND (exp, 2));
6270
6271 /* If VARS have not yet been expanded, expand them now. */
6272 while (vars)
6273 {
6274 if (DECL_RTL (vars) == 0)
6275 {
6276 vars_need_expansion = 1;
6277 expand_decl (vars);
6278 }
6279 expand_decl_init (vars);
6280 vars = TREE_CHAIN (vars);
6281 }
6282
6283 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6284
6285 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6286
6287 return temp;
6288 }
6289
6290 case RTL_EXPR:
6291 if (RTL_EXPR_SEQUENCE (exp))
6292 {
6293 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6294 abort ();
6295 emit_insns (RTL_EXPR_SEQUENCE (exp));
6296 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6297 }
6298 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6299 free_temps_for_rtl_expr (exp);
6300 return RTL_EXPR_RTL (exp);
6301
6302 case CONSTRUCTOR:
6303 /* If we don't need the result, just ensure we evaluate any
6304 subexpressions. */
6305 if (ignore)
6306 {
6307 tree elt;
6308 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6309 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6310 EXPAND_MEMORY_USE_BAD);
6311 return const0_rtx;
6312 }
6313
6314 /* All elts simple constants => refer to a constant in memory. But
6315 if this is a non-BLKmode mode, let it store a field at a time
6316 since that should make a CONST_INT or CONST_DOUBLE when we
6317 fold. Likewise, if we have a target we can use, it is best to
6318 store directly into the target unless the type is large enough
6319 that memcpy will be used. If we are making an initializer and
6320 all operands are constant, put it in memory as well. */
6321 else if ((TREE_STATIC (exp)
6322 && ((mode == BLKmode
6323 && ! (target != 0 && safe_from_p (target, exp, 1)))
6324 || TREE_ADDRESSABLE (exp)
6325 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6326 && (!MOVE_BY_PIECES_P
6327 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6328 TYPE_ALIGN (type) / BITS_PER_UNIT))
6329 && ! mostly_zeros_p (exp))))
6330 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6331 {
6332 rtx constructor = output_constant_def (exp);
6333 if (modifier != EXPAND_CONST_ADDRESS
6334 && modifier != EXPAND_INITIALIZER
6335 && modifier != EXPAND_SUM
6336 && (! memory_address_p (GET_MODE (constructor),
6337 XEXP (constructor, 0))
6338 || (flag_force_addr
6339 && GET_CODE (XEXP (constructor, 0)) != REG)))
6340 constructor = change_address (constructor, VOIDmode,
6341 XEXP (constructor, 0));
6342 return constructor;
6343 }
6344
6345 else
6346 {
6347 /* Handle calls that pass values in multiple non-contiguous
6348 locations. The Irix 6 ABI has examples of this. */
6349 if (target == 0 || ! safe_from_p (target, exp, 1)
6350 || GET_CODE (target) == PARALLEL)
6351 {
6352 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6353 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6354 else
6355 target = assign_temp (type, 0, 1, 1);
6356 }
6357
6358 if (TREE_READONLY (exp))
6359 {
6360 if (GET_CODE (target) == MEM)
6361 target = copy_rtx (target);
6362
6363 RTX_UNCHANGING_P (target) = 1;
6364 }
6365
6366 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0);
6367 return target;
6368 }
6369
6370 case INDIRECT_REF:
6371 {
6372 tree exp1 = TREE_OPERAND (exp, 0);
6373 tree exp2;
6374 tree index;
6375 tree string = string_constant (exp1, &index);
6376 int i;
6377
6378 /* Try to optimize reads from const strings. */
6379 if (string
6380 && TREE_CODE (string) == STRING_CST
6381 && TREE_CODE (index) == INTEGER_CST
6382 && !TREE_INT_CST_HIGH (index)
6383 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6384 && GET_MODE_CLASS (mode) == MODE_INT
6385 && GET_MODE_SIZE (mode) == 1
6386 && modifier != EXPAND_MEMORY_USE_WO)
6387 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6388
6389 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6390 op0 = memory_address (mode, op0);
6391
6392 if (cfun && current_function_check_memory_usage
6393 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6394 {
6395 enum memory_use_mode memory_usage;
6396 memory_usage = get_memory_usage_from_modifier (modifier);
6397
6398 if (memory_usage != MEMORY_USE_DONT)
6399 {
6400 in_check_memory_usage = 1;
6401 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6402 op0, Pmode,
6403 GEN_INT (int_size_in_bytes (type)),
6404 TYPE_MODE (sizetype),
6405 GEN_INT (memory_usage),
6406 TYPE_MODE (integer_type_node));
6407 in_check_memory_usage = 0;
6408 }
6409 }
6410
6411 temp = gen_rtx_MEM (mode, op0);
6412 /* If address was computed by addition,
6413 mark this as an element of an aggregate. */
6414 if (TREE_CODE (exp1) == PLUS_EXPR
6415 || (TREE_CODE (exp1) == SAVE_EXPR
6416 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6417 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6418 || (TREE_CODE (exp1) == ADDR_EXPR
6419 && (exp2 = TREE_OPERAND (exp1, 0))
6420 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6421 MEM_SET_IN_STRUCT_P (temp, 1);
6422
6423 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6424 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6425
6426 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6427 here, because, in C and C++, the fact that a location is accessed
6428 through a pointer to const does not mean that the value there can
6429 never change. Languages where it can never change should
6430 also set TREE_STATIC. */
6431 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6432
6433 /* If we are writing to this object and its type is a record with
6434 readonly fields, we must mark it as readonly so it will
6435 conflict with readonly references to those fields. */
6436 if (modifier == EXPAND_MEMORY_USE_WO
6437 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6438 RTX_UNCHANGING_P (temp) = 1;
6439
6440 return temp;
6441 }
6442
6443 case ARRAY_REF:
6444 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6445 abort ();
6446
6447 {
6448 tree array = TREE_OPERAND (exp, 0);
6449 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6450 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6451 tree index = TREE_OPERAND (exp, 1);
6452 tree index_type = TREE_TYPE (index);
6453 HOST_WIDE_INT i;
6454
6455 /* Optimize the special-case of a zero lower bound.
6456
6457 We convert the low_bound to sizetype to avoid some problems
6458 with constant folding. (E.g. suppose the lower bound is 1,
6459 and its mode is QI. Without the conversion, (ARRAY
6460 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6461 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6462
6463 But sizetype isn't quite right either (especially if
6464 the lowbound is negative). FIXME */
6465
6466 if (! integer_zerop (low_bound))
6467 index = fold (build (MINUS_EXPR, index_type, index,
6468 convert (sizetype, low_bound)));
6469
6470 /* Fold an expression like: "foo"[2].
6471 This is not done in fold so it won't happen inside &.
6472 Don't fold if this is for wide characters since it's too
6473 difficult to do correctly and this is a very rare case. */
6474
6475 if (TREE_CODE (array) == STRING_CST
6476 && TREE_CODE (index) == INTEGER_CST
6477 && !TREE_INT_CST_HIGH (index)
6478 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6479 && GET_MODE_CLASS (mode) == MODE_INT
6480 && GET_MODE_SIZE (mode) == 1)
6481 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6482
6483 /* If this is a constant index into a constant array,
6484 just get the value from the array. Handle both the cases when
6485 we have an explicit constructor and when our operand is a variable
6486 that was declared const. */
6487
6488 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6489 {
6490 if (TREE_CODE (index) == INTEGER_CST
6491 && TREE_INT_CST_HIGH (index) == 0)
6492 {
6493 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6494
6495 i = TREE_INT_CST_LOW (index);
6496 while (elem && i--)
6497 elem = TREE_CHAIN (elem);
6498 if (elem)
6499 return expand_expr (fold (TREE_VALUE (elem)), target,
6500 tmode, ro_modifier);
6501 }
6502 }
6503
6504 else if (optimize >= 1
6505 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6506 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6507 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6508 {
6509 if (TREE_CODE (index) == INTEGER_CST)
6510 {
6511 tree init = DECL_INITIAL (array);
6512
6513 i = TREE_INT_CST_LOW (index);
6514 if (TREE_CODE (init) == CONSTRUCTOR)
6515 {
6516 tree elem = CONSTRUCTOR_ELTS (init);
6517
6518 while (elem
6519 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6520 elem = TREE_CHAIN (elem);
6521 if (elem)
6522 return expand_expr (fold (TREE_VALUE (elem)), target,
6523 tmode, ro_modifier);
6524 }
6525 else if (TREE_CODE (init) == STRING_CST
6526 && TREE_INT_CST_HIGH (index) == 0
6527 && (TREE_INT_CST_LOW (index)
6528 < TREE_STRING_LENGTH (init)))
6529 return (GEN_INT
6530 (TREE_STRING_POINTER
6531 (init)[TREE_INT_CST_LOW (index)]));
6532 }
6533 }
6534 }
6535
6536 /* ... fall through ... */
6537
6538 case COMPONENT_REF:
6539 case BIT_FIELD_REF:
6540 /* If the operand is a CONSTRUCTOR, we can just extract the
6541 appropriate field if it is present. Don't do this if we have
6542 already written the data since we want to refer to that copy
6543 and varasm.c assumes that's what we'll do. */
6544 if (code != ARRAY_REF
6545 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6546 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6547 {
6548 tree elt;
6549
6550 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6551 elt = TREE_CHAIN (elt))
6552 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6553 /* We can normally use the value of the field in the
6554 CONSTRUCTOR. However, if this is a bitfield in
6555 an integral mode that we can fit in a HOST_WIDE_INT,
6556 we must mask only the number of bits in the bitfield,
6557 since this is done implicitly by the constructor. If
6558 the bitfield does not meet either of those conditions,
6559 we can't do this optimization. */
6560 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6561 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6562 == MODE_INT)
6563 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6564 <= HOST_BITS_PER_WIDE_INT))))
6565 {
6566 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6567 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6568 {
6569 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6570
6571 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6572 {
6573 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6574 op0 = expand_and (op0, op1, target);
6575 }
6576 else
6577 {
6578 enum machine_mode imode
6579 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6580 tree count
6581 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6582 0);
6583
6584 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6585 target, 0);
6586 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6587 target, 0);
6588 }
6589 }
6590
6591 return op0;
6592 }
6593 }
6594
6595 {
6596 enum machine_mode mode1;
6597 int bitsize;
6598 int bitpos;
6599 tree offset;
6600 int volatilep = 0;
6601 int alignment;
6602 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6603 &mode1, &unsignedp, &volatilep,
6604 &alignment);
6605
6606 /* If we got back the original object, something is wrong. Perhaps
6607 we are evaluating an expression too early. In any event, don't
6608 infinitely recurse. */
6609 if (tem == exp)
6610 abort ();
6611
6612 /* If TEM's type is a union of variable size, pass TARGET to the inner
6613 computation, since it will need a temporary and TARGET is known
6614 to suffice. This occurs in unchecked conversion in Ada. */
6615
6616 op0 = expand_expr (tem,
6617 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6618 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6619 != INTEGER_CST)
6620 ? target : NULL_RTX),
6621 VOIDmode,
6622 (modifier == EXPAND_INITIALIZER
6623 || modifier == EXPAND_CONST_ADDRESS)
6624 ? modifier : EXPAND_NORMAL);
6625
6626 /* If this is a constant, put it into a register if it is a
6627 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6628 if (CONSTANT_P (op0))
6629 {
6630 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6631 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6632 && offset == 0)
6633 op0 = force_reg (mode, op0);
6634 else
6635 op0 = validize_mem (force_const_mem (mode, op0));
6636 }
6637
6638 if (offset != 0)
6639 {
6640 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6641
6642 /* If this object is in memory, put it into a register.
6643 This case can't occur in C, but can in Ada if we have
6644 unchecked conversion of an expression from a scalar type to
6645 an array or record type. */
6646 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6647 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6648 {
6649 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6650
6651 mark_temp_addr_taken (memloc);
6652 emit_move_insn (memloc, op0);
6653 op0 = memloc;
6654 }
6655
6656 if (GET_CODE (op0) != MEM)
6657 abort ();
6658
6659 if (GET_MODE (offset_rtx) != ptr_mode)
6660 {
6661 #ifdef POINTERS_EXTEND_UNSIGNED
6662 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6663 #else
6664 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6665 #endif
6666 }
6667
6668 /* A constant address in OP0 can have VOIDmode; we must not try
6669 to call force_reg in that case, so avoid it here. */
6670 if (GET_CODE (op0) == MEM
6671 && GET_MODE (op0) == BLKmode
6672 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6673 && bitsize != 0
6674 && (bitpos % bitsize) == 0
6675 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6676 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6677 {
6678 rtx temp = change_address (op0, mode1,
6679 plus_constant (XEXP (op0, 0),
6680 (bitpos /
6681 BITS_PER_UNIT)));
6682 if (GET_CODE (XEXP (temp, 0)) == REG)
6683 op0 = temp;
6684 else
6685 op0 = change_address (op0, mode1,
6686 force_reg (GET_MODE (XEXP (temp, 0)),
6687 XEXP (temp, 0)));
6688 bitpos = 0;
6689 }
6690
6691
6692 op0 = change_address (op0, VOIDmode,
6693 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6694 force_reg (ptr_mode,
6695 offset_rtx)));
6696 }
6697
6698 /* Don't forget about volatility even if this is a bitfield. */
6699 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6700 {
6701 op0 = copy_rtx (op0);
6702 MEM_VOLATILE_P (op0) = 1;
6703 }
6704
6705 /* Check the access. */
6706 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6707 {
6708 enum memory_use_mode memory_usage;
6709 memory_usage = get_memory_usage_from_modifier (modifier);
6710
6711 if (memory_usage != MEMORY_USE_DONT)
6712 {
6713 rtx to;
6714 int size;
6715
6716 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6717 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6718
6719 /* Check the access rights of the pointer. */
6720 if (size > BITS_PER_UNIT)
6721 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6722 to, Pmode,
6723 GEN_INT (size / BITS_PER_UNIT),
6724 TYPE_MODE (sizetype),
6725 GEN_INT (memory_usage),
6726 TYPE_MODE (integer_type_node));
6727 }
6728 }
6729
6730 /* In cases where an aligned union has an unaligned object
6731 as a field, we might be extracting a BLKmode value from
6732 an integer-mode (e.g., SImode) object. Handle this case
6733 by doing the extract into an object as wide as the field
6734 (which we know to be the width of a basic mode), then
6735 storing into memory, and changing the mode to BLKmode.
6736 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6737 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6738 if (mode1 == VOIDmode
6739 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6740 || (modifier != EXPAND_CONST_ADDRESS
6741 && modifier != EXPAND_INITIALIZER
6742 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6743 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6744 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6745 /* If the field isn't aligned enough to fetch as a memref,
6746 fetch it as a bit field. */
6747 || (mode1 != BLKmode && SLOW_UNALIGNED_ACCESS
6748 && ((TYPE_ALIGN (TREE_TYPE (tem))
6749 < (unsigned int) GET_MODE_ALIGNMENT (mode))
6750 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))))
6751 || (modifier != EXPAND_CONST_ADDRESS
6752 && modifier != EXPAND_INITIALIZER
6753 && mode == BLKmode
6754 && SLOW_UNALIGNED_ACCESS
6755 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6756 || bitpos % TYPE_ALIGN (type) != 0)))
6757 {
6758 enum machine_mode ext_mode = mode;
6759
6760 if (ext_mode == BLKmode
6761 && ! (target != 0 && GET_CODE (op0) == MEM
6762 && GET_CODE (target) == MEM
6763 && bitpos % BITS_PER_UNIT == 0))
6764 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6765
6766 if (ext_mode == BLKmode)
6767 {
6768 /* In this case, BITPOS must start at a byte boundary and
6769 TARGET, if specified, must be a MEM. */
6770 if (GET_CODE (op0) != MEM
6771 || (target != 0 && GET_CODE (target) != MEM)
6772 || bitpos % BITS_PER_UNIT != 0)
6773 abort ();
6774
6775 op0 = change_address (op0, VOIDmode,
6776 plus_constant (XEXP (op0, 0),
6777 bitpos / BITS_PER_UNIT));
6778 if (target == 0)
6779 target = assign_temp (type, 0, 1, 1);
6780
6781 emit_block_move (target, op0,
6782 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6783 / BITS_PER_UNIT),
6784 1);
6785
6786 return target;
6787 }
6788
6789 op0 = validize_mem (op0);
6790
6791 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6792 mark_reg_pointer (XEXP (op0, 0), alignment);
6793
6794 op0 = extract_bit_field (op0, bitsize, bitpos,
6795 unsignedp, target, ext_mode, ext_mode,
6796 alignment,
6797 int_size_in_bytes (TREE_TYPE (tem)));
6798
6799 /* If the result is a record type and BITSIZE is narrower than
6800 the mode of OP0, an integral mode, and this is a big endian
6801 machine, we must put the field into the high-order bits. */
6802 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6803 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6804 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6805 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6806 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6807 - bitsize),
6808 op0, 1);
6809
6810 if (mode == BLKmode)
6811 {
6812 rtx new = assign_stack_temp (ext_mode,
6813 bitsize / BITS_PER_UNIT, 0);
6814
6815 emit_move_insn (new, op0);
6816 op0 = copy_rtx (new);
6817 PUT_MODE (op0, BLKmode);
6818 MEM_SET_IN_STRUCT_P (op0, 1);
6819 }
6820
6821 return op0;
6822 }
6823
6824 /* If the result is BLKmode, use that to access the object
6825 now as well. */
6826 if (mode == BLKmode)
6827 mode1 = BLKmode;
6828
6829 /* Get a reference to just this component. */
6830 if (modifier == EXPAND_CONST_ADDRESS
6831 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6832 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6833 (bitpos / BITS_PER_UNIT)));
6834 else
6835 op0 = change_address (op0, mode1,
6836 plus_constant (XEXP (op0, 0),
6837 (bitpos / BITS_PER_UNIT)));
6838
6839 if (GET_CODE (op0) == MEM)
6840 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6841
6842 if (GET_CODE (XEXP (op0, 0)) == REG)
6843 mark_reg_pointer (XEXP (op0, 0), alignment);
6844
6845 MEM_SET_IN_STRUCT_P (op0, 1);
6846 MEM_VOLATILE_P (op0) |= volatilep;
6847 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6848 || modifier == EXPAND_CONST_ADDRESS
6849 || modifier == EXPAND_INITIALIZER)
6850 return op0;
6851 else if (target == 0)
6852 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6853
6854 convert_move (target, op0, unsignedp);
6855 return target;
6856 }
6857
6858 /* Intended for a reference to a buffer of a file-object in Pascal.
6859 But it's not certain that a special tree code will really be
6860 necessary for these. INDIRECT_REF might work for them. */
6861 case BUFFER_REF:
6862 abort ();
6863
6864 case IN_EXPR:
6865 {
6866 /* Pascal set IN expression.
6867
6868 Algorithm:
6869 rlo = set_low - (set_low%bits_per_word);
6870 the_word = set [ (index - rlo)/bits_per_word ];
6871 bit_index = index % bits_per_word;
6872 bitmask = 1 << bit_index;
6873 return !!(the_word & bitmask); */
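/* A stand-alone sketch of that algorithm on a plain byte array, the
   same thing the code below does with bits_per_word == BITS_PER_UNIT.
   Purely illustrative; the names are hypothetical.  */
#if 0
static int
example_set_contains (set_bytes, set_low, index)
     unsigned char *set_bytes;
     int set_low, index;
{
  int rlo = set_low - (set_low % BITS_PER_UNIT);
  unsigned char the_word = set_bytes[(index - rlo) / BITS_PER_UNIT];
  int bit_index = index % BITS_PER_UNIT;
  unsigned char bitmask = 1 << bit_index;
  return (the_word & bitmask) != 0;
}
#endif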
6874
6875 tree set = TREE_OPERAND (exp, 0);
6876 tree index = TREE_OPERAND (exp, 1);
6877 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6878 tree set_type = TREE_TYPE (set);
6879 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6880 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6881 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6882 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6883 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6884 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6885 rtx setaddr = XEXP (setval, 0);
6886 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6887 rtx rlow;
6888 rtx diff, quo, rem, addr, bit, result;
6889
6890 preexpand_calls (exp);
6891
6892 /* If domain is empty, answer is no. Likewise if index is constant
6893 and out of bounds. */
6894 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6895 && TREE_CODE (set_low_bound) == INTEGER_CST
6896 && tree_int_cst_lt (set_high_bound, set_low_bound))
6897 || (TREE_CODE (index) == INTEGER_CST
6898 && TREE_CODE (set_low_bound) == INTEGER_CST
6899 && tree_int_cst_lt (index, set_low_bound))
6900 || (TREE_CODE (set_high_bound) == INTEGER_CST
6901 && TREE_CODE (index) == INTEGER_CST
6902 && tree_int_cst_lt (set_high_bound, index))))
6903 return const0_rtx;
6904
6905 if (target == 0)
6906 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6907
6908 /* If we get here, we have to generate the code for both cases
6909 (in range and out of range). */
6910
6911 op0 = gen_label_rtx ();
6912 op1 = gen_label_rtx ();
6913
6914 if (! (GET_CODE (index_val) == CONST_INT
6915 && GET_CODE (lo_r) == CONST_INT))
6916 {
6917 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6918 GET_MODE (index_val), iunsignedp, 0, op1);
6919 }
6920
6921 if (! (GET_CODE (index_val) == CONST_INT
6922 && GET_CODE (hi_r) == CONST_INT))
6923 {
6924 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6925 GET_MODE (index_val), iunsignedp, 0, op1);
6926 }
6927
6928 /* Calculate the element number of bit zero in the first word
6929 of the set. */
6930 if (GET_CODE (lo_r) == CONST_INT)
6931 rlow = GEN_INT (INTVAL (lo_r)
6932 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6933 else
6934 rlow = expand_binop (index_mode, and_optab, lo_r,
6935 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6936 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6937
6938 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6939 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6940
6941 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6942 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6943 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6944 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6945
6946 addr = memory_address (byte_mode,
6947 expand_binop (index_mode, add_optab, diff,
6948 setaddr, NULL_RTX, iunsignedp,
6949 OPTAB_LIB_WIDEN));
6950
6951 /* Extract the bit we want to examine. */
6952 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6953 gen_rtx_MEM (byte_mode, addr),
6954 make_tree (TREE_TYPE (index), rem),
6955 NULL_RTX, 1);
6956 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6957 GET_MODE (target) == byte_mode ? target : 0,
6958 1, OPTAB_LIB_WIDEN);
6959
6960 if (result != target)
6961 convert_move (target, result, 1);
6962
6963 /* Output the code to handle the out-of-range case. */
6964 emit_jump (op0);
6965 emit_label (op1);
6966 emit_move_insn (target, const0_rtx);
6967 emit_label (op0);
6968 return target;
6969 }
6970
6971 case WITH_CLEANUP_EXPR:
6972 if (RTL_EXPR_RTL (exp) == 0)
6973 {
6974 RTL_EXPR_RTL (exp)
6975 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6976 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6977
6978 /* That's it for this cleanup. */
6979 TREE_OPERAND (exp, 2) = 0;
6980 }
6981 return RTL_EXPR_RTL (exp);
6982
6983 case CLEANUP_POINT_EXPR:
6984 {
6985 /* Start a new binding layer that will keep track of all cleanup
6986 actions to be performed. */
6987 expand_start_bindings (2);
6988
6989 target_temp_slot_level = temp_slot_level;
6990
6991 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6992 /* If we're going to use this value, load it up now. */
6993 if (! ignore)
6994 op0 = force_not_mem (op0);
6995 preserve_temp_slots (op0);
6996 expand_end_bindings (NULL_TREE, 0, 0);
6997 }
6998 return op0;
6999
7000 case CALL_EXPR:
7001 /* Check for a built-in function. */
7002 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7003 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7004 == FUNCTION_DECL)
7005 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7006 return expand_builtin (exp, target, subtarget, tmode, ignore);
7007
7008 /* If this call was expanded already by preexpand_calls,
7009 just return the result we got. */
7010 if (CALL_EXPR_RTL (exp) != 0)
7011 return CALL_EXPR_RTL (exp);
7012
7013 return expand_call (exp, target, ignore);
7014
7015 case NON_LVALUE_EXPR:
7016 case NOP_EXPR:
7017 case CONVERT_EXPR:
7018 case REFERENCE_EXPR:
7019 if (TREE_CODE (type) == UNION_TYPE)
7020 {
7021 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7022
7023 /* If both input and output are BLKmode, this conversion
7024 isn't actually doing anything unless we need to make the
7025 alignment stricter. */
7026 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7027 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7028 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7029 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7030 modifier);
7031
7032 if (target == 0)
7033 {
7034 if (mode != BLKmode)
7035 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7036 else
7037 target = assign_temp (type, 0, 1, 1);
7038 }
7039
7040 if (GET_CODE (target) == MEM)
7041 /* Store data into beginning of memory target. */
7042 store_expr (TREE_OPERAND (exp, 0),
7043 change_address (target, TYPE_MODE (valtype), 0), 0);
7044
7045 else if (GET_CODE (target) == REG)
7046 /* Store this field into a union of the proper type. */
7047 store_field (target,
7048 MIN ((int_size_in_bytes (TREE_TYPE
7049 (TREE_OPERAND (exp, 0)))
7050 * BITS_PER_UNIT),
7051 GET_MODE_BITSIZE (mode)),
7052 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7053 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
7054 else
7055 abort ();
7056
7057 /* Return the entire union. */
7058 return target;
7059 }
7060
7061 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7062 {
7063 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7064 ro_modifier);
7065
7066 /* If the signedness of the conversion differs and OP0 is
7067 a promoted SUBREG, clear that indication since we now
7068 have to do the proper extension. */
7069 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7070 && GET_CODE (op0) == SUBREG)
7071 SUBREG_PROMOTED_VAR_P (op0) = 0;
7072
7073 return op0;
7074 }
7075
7076 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7077 if (GET_MODE (op0) == mode)
7078 return op0;
7079
7080 /* If OP0 is a constant, just convert it into the proper mode. */
7081 if (CONSTANT_P (op0))
7082 return
7083 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7084 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7085
7086 if (modifier == EXPAND_INITIALIZER)
7087 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7088
7089 if (target == 0)
7090 return
7091 convert_to_mode (mode, op0,
7092 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7093 else
7094 convert_move (target, op0,
7095 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7096 return target;
7097
7098 case PLUS_EXPR:
7099 /* We come here from MINUS_EXPR when the second operand is a
7100 constant. */
7101 plus_expr:
7102 this_optab = add_optab;
7103
7104 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7105 something else, make sure we add the register to the constant and
7106 then to the other thing. This case can occur during strength
7107 reduction and doing it this way will produce better code if the
7108 frame pointer or argument pointer is eliminated.
7109
7110 fold-const.c will ensure that the constant is always in the inner
7111 PLUS_EXPR, so the only case we need to do anything about is if
7112 sp, ap, or fp is our second argument, in which case we must swap
7113 the innermost first argument and our second argument. */
7114
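/* For example, (X + C) + FP is rearranged into (FP + C) + X, so the
   register and the constant are combined first.  */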
7115 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7116 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7117 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7118 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7119 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7120 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7121 {
7122 tree t = TREE_OPERAND (exp, 1);
7123
7124 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7125 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7126 }
7127
7128 /* If the result is to be ptr_mode and we are adding an integer to
7129 something, we might be forming a constant. So try to use
7130 plus_constant. If it produces a sum and we can't accept it,
7131 use force_operand. This allows P = &ARR[const] to generate
7132 efficient code on machines where a SYMBOL_REF is not a valid
7133 address.
7134
7135 If this is an EXPAND_SUM call, always return the sum. */
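/* For example, for P = &ARR[3] this lets the symbol and the byte offset
   fold into a single constant address rather than a run-time addition.  */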
7136 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7137 || mode == ptr_mode)
7138 {
7139 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7140 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7141 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7142 {
7143 rtx constant_part;
7144
7145 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7146 EXPAND_SUM);
7147 /* Use immed_double_const to ensure that the constant is
7148 truncated according to the mode of OP1, then sign extended
7149 to a HOST_WIDE_INT. Using the constant directly can result
7150 in non-canonical RTL in a 64x32 cross compile. */
7151 constant_part
7152 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7153 (HOST_WIDE_INT) 0,
7154 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7155 op1 = plus_constant (op1, INTVAL (constant_part));
7156 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7157 op1 = force_operand (op1, target);
7158 return op1;
7159 }
7160
7161 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7162 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7163 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7164 {
7165 rtx constant_part;
7166
7167 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7168 EXPAND_SUM);
7169 if (! CONSTANT_P (op0))
7170 {
7171 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7172 VOIDmode, modifier);
7173 /* Don't go to both_summands if modifier
7174 says it's not right to return a PLUS. */
7175 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7176 goto binop2;
7177 goto both_summands;
7178 }
7179 /* Use immed_double_const to ensure that the constant is
7180 truncated according to the mode of OP0, then sign extended
7181 to a HOST_WIDE_INT. Using the constant directly can result
7182 in non-canonical RTL in a 64x32 cross compile. */
7183 constant_part
7184 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7185 (HOST_WIDE_INT) 0,
7186 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7187 op0 = plus_constant (op0, INTVAL (constant_part));
7188 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7189 op0 = force_operand (op0, target);
7190 return op0;
7191 }
7192 }
7193
7194 /* No sense saving up arithmetic to be done
7195 if it's all in the wrong mode to form part of an address.
7196 And force_operand won't know whether to sign-extend or
7197 zero-extend. */
7198 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7199 || mode != ptr_mode)
7200 goto binop;
7201
7202 preexpand_calls (exp);
7203 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7204 subtarget = 0;
7205
7206 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7207 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7208
7209 both_summands:
7210 /* Make sure any term that's a sum with a constant comes last. */
7211 if (GET_CODE (op0) == PLUS
7212 && CONSTANT_P (XEXP (op0, 1)))
7213 {
7214 temp = op0;
7215 op0 = op1;
7216 op1 = temp;
7217 }
7218 /* If adding to a sum including a constant,
7219 associate it to put the constant outside. */
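/* That is, OP0 + (X + C) is rewritten as (OP0 + X) + C, merging
   constants into the trailing term where possible.  */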
7220 if (GET_CODE (op1) == PLUS
7221 && CONSTANT_P (XEXP (op1, 1)))
7222 {
7223 rtx constant_term = const0_rtx;
7224
7225 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7226 if (temp != 0)
7227 op0 = temp;
7228 /* Ensure that MULT comes first if there is one. */
7229 else if (GET_CODE (op0) == MULT)
7230 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7231 else
7232 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7233
7234 /* Let's also eliminate constants from op0 if possible. */
7235 op0 = eliminate_constant_term (op0, &constant_term);
7236
7237 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7238 their sum should be a constant. Form it into OP1, since the
7239 result we want will then be OP0 + OP1. */
7240
7241 temp = simplify_binary_operation (PLUS, mode, constant_term,
7242 XEXP (op1, 1));
7243 if (temp != 0)
7244 op1 = temp;
7245 else
7246 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7247 }
7248
7249 /* Put a constant term last and put a multiplication first. */
7250 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7251 temp = op1, op1 = op0, op0 = temp;
7252
7253 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7254 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7255
7256 case MINUS_EXPR:
7257 /* For initializers, we are allowed to return a MINUS of two
7258 symbolic constants. Here we handle all cases when both operands
7259 are constant. */
7260 /* Handle difference of two symbolic constants,
7261 for the sake of an initializer. */
7262 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7263 && really_constant_p (TREE_OPERAND (exp, 0))
7264 && really_constant_p (TREE_OPERAND (exp, 1)))
7265 {
7266 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7267 VOIDmode, ro_modifier);
7268 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7269 VOIDmode, ro_modifier);
7270
7271 /* If the last operand is a CONST_INT, use plus_constant of
7272 the negated constant. Else make the MINUS. */
7273 if (GET_CODE (op1) == CONST_INT)
7274 return plus_constant (op0, - INTVAL (op1));
7275 else
7276 return gen_rtx_MINUS (mode, op0, op1);
7277 }
7278 /* Convert A - const to A + (-const). */
7279 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7280 {
7281 tree negated = fold (build1 (NEGATE_EXPR, type,
7282 TREE_OPERAND (exp, 1)));
7283
7284 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7285 /* If we can't negate the constant in TYPE, leave it alone and
7286 expand_binop will negate it for us. We used to try to do it
7287 here in the signed version of TYPE, but that doesn't work
7288 on POINTER_TYPEs. */;
7289 else
7290 {
7291 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7292 goto plus_expr;
7293 }
7294 }
7295 this_optab = sub_optab;
7296 goto binop;
7297
7298 case MULT_EXPR:
7299 preexpand_calls (exp);
7300 /* If first operand is constant, swap them.
7301 Thus the following special case checks need only
7302 check the second operand. */
7303 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7304 {
7305 register tree t1 = TREE_OPERAND (exp, 0);
7306 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7307 TREE_OPERAND (exp, 1) = t1;
7308 }
7309
7310 /* Attempt to return something suitable for generating an
7311 indexed address, for machines that support that. */
7312
7313 if (modifier == EXPAND_SUM && mode == ptr_mode
7314 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7315 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7316 {
7317 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7318 EXPAND_SUM);
7319
7320 /* Apply distributive law if OP0 is x+c. */
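/* That is, (X + C) * N becomes X*N + C*N, keeping the constant
   term separate so it can become part of an address.  */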
7321 if (GET_CODE (op0) == PLUS
7322 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7323 return
7324 gen_rtx_PLUS
7325 (mode,
7326 gen_rtx_MULT
7327 (mode, XEXP (op0, 0),
7328 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7329 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7330 * INTVAL (XEXP (op0, 1))));
7331
7332 if (GET_CODE (op0) != REG)
7333 op0 = force_operand (op0, NULL_RTX);
7334 if (GET_CODE (op0) != REG)
7335 op0 = copy_to_mode_reg (mode, op0);
7336
7337 return
7338 gen_rtx_MULT (mode, op0,
7339 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7340 }
7341
7342 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7343 subtarget = 0;
7344
7345 /* Check for multiplying things that have been extended
7346 from a narrower type. If this machine supports multiplying
7347 in that narrower type with a result in the desired type,
7348 do it that way, and avoid the explicit type-conversion. */
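/* For example, multiplying two values that were extended from SImode
   to DImode can use a single SImode->DImode widening multiply, when the
   target provides one, instead of a full DImode multiply.  */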
7349 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7350 && TREE_CODE (type) == INTEGER_TYPE
7351 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7352 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7353 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7354 && int_fits_type_p (TREE_OPERAND (exp, 1),
7355 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7356 /* Don't use a widening multiply if a shift will do. */
7357 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7358 > HOST_BITS_PER_WIDE_INT)
7359 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7360 ||
7361 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7362 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7363 ==
7364 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7365 /* If both operands are extended, they must either both
7366 be zero-extended or both be sign-extended. */
7367 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7368 ==
7369 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7370 {
7371 enum machine_mode innermode
7372 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7373 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7374 ? smul_widen_optab : umul_widen_optab);
7375 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7376 ? umul_widen_optab : smul_widen_optab);
7377 if (mode == GET_MODE_WIDER_MODE (innermode))
7378 {
7379 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7380 {
7381 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7382 NULL_RTX, VOIDmode, 0);
7383 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7384 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7385 VOIDmode, 0);
7386 else
7387 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7388 NULL_RTX, VOIDmode, 0);
7389 goto binop2;
7390 }
7391 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7392 && innermode == word_mode)
7393 {
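/* Only the opposite-signedness widening multiply is available;
   use it and then correct the high part of the product with
   expand_mult_highpart_adjust.  */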
7394 rtx htem;
7395 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7396 NULL_RTX, VOIDmode, 0);
7397 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7398 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7399 VOIDmode, 0);
7400 else
7401 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7402 NULL_RTX, VOIDmode, 0);
7403 temp = expand_binop (mode, other_optab, op0, op1, target,
7404 unsignedp, OPTAB_LIB_WIDEN);
7405 htem = expand_mult_highpart_adjust (innermode,
7406 gen_highpart (innermode, temp),
7407 op0, op1,
7408 gen_highpart (innermode, temp),
7409 unsignedp);
7410 emit_move_insn (gen_highpart (innermode, temp), htem);
7411 return temp;
7412 }
7413 }
7414 }
7415 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7416 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7417 return expand_mult (mode, op0, op1, target, unsignedp);
7418
7419 case TRUNC_DIV_EXPR:
7420 case FLOOR_DIV_EXPR:
7421 case CEIL_DIV_EXPR:
7422 case ROUND_DIV_EXPR:
7423 case EXACT_DIV_EXPR:
7424 preexpand_calls (exp);
7425 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7426 subtarget = 0;
7427 /* Possible optimization: compute the dividend with EXPAND_SUM,
7428 then, if the divisor is constant, optimize the case where some
7429 terms of the dividend have coefficients divisible by it. */
7430 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7431 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7432 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7433
7434 case RDIV_EXPR:
7435 this_optab = flodiv_optab;
7436 goto binop;
7437
7438 case TRUNC_MOD_EXPR:
7439 case FLOOR_MOD_EXPR:
7440 case CEIL_MOD_EXPR:
7441 case ROUND_MOD_EXPR:
7442 preexpand_calls (exp);
7443 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7444 subtarget = 0;
7445 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7446 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7447 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7448
7449 case FIX_ROUND_EXPR:
7450 case FIX_FLOOR_EXPR:
7451 case FIX_CEIL_EXPR:
7452 abort (); /* Not used for C. */
7453
7454 case FIX_TRUNC_EXPR:
7455 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7456 if (target == 0)
7457 target = gen_reg_rtx (mode);
7458 expand_fix (target, op0, unsignedp);
7459 return target;
7460
7461 case FLOAT_EXPR:
7462 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7463 if (target == 0)
7464 target = gen_reg_rtx (mode);
7465 /* expand_float can't figure out what to do if FROM has VOIDmode.
7466 So give it the correct mode. With -O, cse will optimize this. */
7467 if (GET_MODE (op0) == VOIDmode)
7468 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7469 op0);
7470 expand_float (target, op0,
7471 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7472 return target;
7473
7474 case NEGATE_EXPR:
7475 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7476 temp = expand_unop (mode, neg_optab, op0, target, 0);
7477 if (temp == 0)
7478 abort ();
7479 return temp;
7480
7481 case ABS_EXPR:
7482 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7483
7484 /* Handle complex values specially. */
7485 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7486 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7487 return expand_complex_abs (mode, op0, target, unsignedp);
7488
7489 /* Unsigned abs is simply the operand. Testing here means we don't
7490 risk generating incorrect code below. */
7491 if (TREE_UNSIGNED (type))
7492 return op0;
7493
7494 return expand_abs (mode, op0, target,
7495 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7496
7497 case MAX_EXPR:
7498 case MIN_EXPR:
7499 target = original_target;
7500 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7501 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7502 || GET_MODE (target) != mode
7503 || (GET_CODE (target) == REG
7504 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7505 target = gen_reg_rtx (mode);
7506 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7507 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7508
7509 /* First try to do it with a special MIN or MAX instruction.
7510 If that does not win, use a conditional jump to select the proper
7511 value. */
7512 this_optab = (TREE_UNSIGNED (type)
7513 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7514 : (code == MIN_EXPR ? smin_optab : smax_optab));
7515
7516 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7517 OPTAB_WIDEN);
7518 if (temp != 0)
7519 return temp;
7520
7521 /* At this point, a MEM target is no longer useful; we will get better
7522 code without it. */
7523
7524 if (GET_CODE (target) == MEM)
7525 target = gen_reg_rtx (mode);
7526
7527 if (target != op0)
7528 emit_move_insn (target, op0);
7529
7530 op0 = gen_label_rtx ();
7531
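/* OP0 is reused here as the label past the conditional store: if TARGET
   already holds the desired extremum, the comparison below branches to it
   and the move of OP1 is skipped.  */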
7532 /* If this mode is an integer too wide to compare properly,
7533 compare word by word. Rely on cse to optimize constant cases. */
7534 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode, ccp_jump))
7535 {
7536 if (code == MAX_EXPR)
7537 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7538 target, op1, NULL_RTX, op0);
7539 else
7540 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7541 op1, target, NULL_RTX, op0);
7542 }
7543 else
7544 {
7545 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7546 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7547 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7548 op0);
7549 }
7550 emit_move_insn (target, op1);
7551 emit_label (op0);
7552 return target;
7553
7554 case BIT_NOT_EXPR:
7555 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7556 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7557 if (temp == 0)
7558 abort ();
7559 return temp;
7560
7561 case FFS_EXPR:
7562 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7563 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7564 if (temp == 0)
7565 abort ();
7566 return temp;
7567
7568 /* ??? Can optimize bitwise operations with one arg constant.
7569 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7570 and (a bitwise1 b) bitwise2 b (etc)
7571 but that is probably not worthwhile. */
7572
7573 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7574 boolean values when we want in all cases to compute both of them. In
7575 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7576 as actual zero-or-1 values and then bitwise anding. In cases where
7577 there cannot be any side effects, better code would be made by
7578 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7579 how to recognize those cases. */
7580
7581 case TRUTH_AND_EXPR:
7582 case BIT_AND_EXPR:
7583 this_optab = and_optab;
7584 goto binop;
7585
7586 case TRUTH_OR_EXPR:
7587 case BIT_IOR_EXPR:
7588 this_optab = ior_optab;
7589 goto binop;
7590
7591 case TRUTH_XOR_EXPR:
7592 case BIT_XOR_EXPR:
7593 this_optab = xor_optab;
7594 goto binop;
7595
7596 case LSHIFT_EXPR:
7597 case RSHIFT_EXPR:
7598 case LROTATE_EXPR:
7599 case RROTATE_EXPR:
7600 preexpand_calls (exp);
7601 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7602 subtarget = 0;
7603 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7604 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7605 unsignedp);
7606
7607 /* Could determine the answer when only additive constants differ. Also,
7608 the addition of one can be handled by changing the condition. */
7609 case LT_EXPR:
7610 case LE_EXPR:
7611 case GT_EXPR:
7612 case GE_EXPR:
7613 case EQ_EXPR:
7614 case NE_EXPR:
7615 preexpand_calls (exp);
7616 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7617 if (temp != 0)
7618 return temp;
7619
7620 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7621 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7622 && original_target
7623 && GET_CODE (original_target) == REG
7624 && (GET_MODE (original_target)
7625 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7626 {
7627 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7628 VOIDmode, 0);
7629
7630 if (temp != original_target)
7631 temp = copy_to_reg (temp);
7632
7633 op1 = gen_label_rtx ();
7634 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7635 GET_MODE (temp), unsignedp, 0, op1);
7636 emit_move_insn (temp, const1_rtx);
7637 emit_label (op1);
7638 return temp;
7639 }
7640
7641 /* If no set-flag instruction, must generate a conditional
7642 store into a temporary variable. Drop through
7643 and handle this like && and ||. */
7644
7645 case TRUTH_ANDIF_EXPR:
7646 case TRUTH_ORIF_EXPR:
7647 if (! ignore
7648 && (target == 0 || ! safe_from_p (target, exp, 1)
7649 /* Make sure we don't have a hard reg (such as function's return
7650 value) live across basic blocks, if not optimizing. */
7651 || (!optimize && GET_CODE (target) == REG
7652 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7653 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7654
7655 if (target)
7656 emit_clr_insn (target);
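/* TARGET starts out zero; if the condition does not jump to OP1,
   it is set to one below.  */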
7657
7658 op1 = gen_label_rtx ();
7659 jumpifnot (exp, op1);
7660
7661 if (target)
7662 emit_0_to_1_insn (target);
7663
7664 emit_label (op1);
7665 return ignore ? const0_rtx : target;
7666
7667 case TRUTH_NOT_EXPR:
7668 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7669 /* The parser is careful to generate TRUTH_NOT_EXPR
7670 only with operands that are always zero or one. */
7671 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7672 target, 1, OPTAB_LIB_WIDEN);
7673 if (temp == 0)
7674 abort ();
7675 return temp;
7676
7677 case COMPOUND_EXPR:
7678 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7679 emit_queue ();
7680 return expand_expr (TREE_OPERAND (exp, 1),
7681 (ignore ? const0_rtx : target),
7682 VOIDmode, 0);
7683
7684 case COND_EXPR:
7685 /* If we would have a "singleton" (see below) were it not for a
7686 conversion in each arm, bring that conversion back out. */
7687 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7688 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7689 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7690 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7691 {
7692 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7693 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7694
7695 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7696 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7697 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7698 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7699 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7700 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7701 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7702 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7703 return expand_expr (build1 (NOP_EXPR, type,
7704 build (COND_EXPR, TREE_TYPE (true),
7705 TREE_OPERAND (exp, 0),
7706 true, false)),
7707 target, tmode, modifier);
7708 }
7709
7710 {
7711 /* Note that COND_EXPRs whose type is a structure or union
7712 are required to be constructed to contain assignments of
7713 a temporary variable, so that we can evaluate them here
7714 for side effect only. If type is void, we must do likewise. */
7715
7716 /* If an arm of the branch requires a cleanup,
7717 only that cleanup is performed. */
7718
7719 tree singleton = 0;
7720 tree binary_op = 0, unary_op = 0;
7721
7722 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7723 convert it to our mode, if necessary. */
7724 if (integer_onep (TREE_OPERAND (exp, 1))
7725 && integer_zerop (TREE_OPERAND (exp, 2))
7726 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7727 {
7728 if (ignore)
7729 {
7730 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7731 ro_modifier);
7732 return const0_rtx;
7733 }
7734
7735 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7736 if (GET_MODE (op0) == mode)
7737 return op0;
7738
7739 if (target == 0)
7740 target = gen_reg_rtx (mode);
7741 convert_move (target, op0, unsignedp);
7742 return target;
7743 }
7744
7745 /* Check for X ? A + B : A. If we have this, we can copy A to the
7746 output and conditionally add B. Similarly for unary operations.
7747 Don't do this if X has side-effects because those side effects
7748 might affect A or B and the "?" operation is a sequence point in
7749 ANSI. (operand_equal_p tests for side effects.) */
7750
7751 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7752 && operand_equal_p (TREE_OPERAND (exp, 2),
7753 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7754 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7755 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7756 && operand_equal_p (TREE_OPERAND (exp, 1),
7757 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7758 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7759 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7760 && operand_equal_p (TREE_OPERAND (exp, 2),
7761 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7762 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7763 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7764 && operand_equal_p (TREE_OPERAND (exp, 1),
7765 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7766 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7767
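/* If one of the patterns above matched, SINGLETON is the arm that is
   just A, and BINARY_OP or UNARY_OP is the arm that operates on A.  */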
7768 /* If we are not to produce a result, we have no target. Otherwise,
7769 if a target was specified use it; it will not be used as an
7770 intermediate target unless it is safe. If no target, use a
7771 temporary. */
7772
7773 if (ignore)
7774 temp = 0;
7775 else if (original_target
7776 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7777 || (singleton && GET_CODE (original_target) == REG
7778 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7779 && original_target == var_rtx (singleton)))
7780 && GET_MODE (original_target) == mode
7781 #ifdef HAVE_conditional_move
7782 && (! can_conditionally_move_p (mode)
7783 || GET_CODE (original_target) == REG
7784 || TREE_ADDRESSABLE (type))
7785 #endif
7786 && ! (GET_CODE (original_target) == MEM
7787 && MEM_VOLATILE_P (original_target)))
7788 temp = original_target;
7789 else if (TREE_ADDRESSABLE (type))
7790 abort ();
7791 else
7792 temp = assign_temp (type, 0, 0, 1);
7793
7794 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7795 do the test of X as a store-flag operation, do this as
7796 A + ((X != 0) << log C). Similarly for other simple binary
7797 operators. Only do for C == 1 if BRANCH_COST is low. */
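/* For instance, X ? A + 4 : A becomes A + ((X != 0) << 2),
   and X ? A + 1 : A becomes A + (X != 0).  */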
7798 if (temp && singleton && binary_op
7799 && (TREE_CODE (binary_op) == PLUS_EXPR
7800 || TREE_CODE (binary_op) == MINUS_EXPR
7801 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7802 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7803 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7804 : integer_onep (TREE_OPERAND (binary_op, 1)))
7805 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7806 {
7807 rtx result;
7808 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7809 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7810 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7811 : xor_optab);
7812
7813 /* If we had X ? A : A + 1, do this as A + (X == 0).
7814
7815 We have to invert the truth value here and then put it
7816 back later if do_store_flag fails. We cannot simply copy
7817 TREE_OPERAND (exp, 0) to another variable and modify that
7818 because invert_truthvalue can modify the tree pointed to
7819 by its argument. */
7820 if (singleton == TREE_OPERAND (exp, 1))
7821 TREE_OPERAND (exp, 0)
7822 = invert_truthvalue (TREE_OPERAND (exp, 0));
7823
7824 result = do_store_flag (TREE_OPERAND (exp, 0),
7825 (safe_from_p (temp, singleton, 1)
7826 ? temp : NULL_RTX),
7827 mode, BRANCH_COST <= 1);
7828
7829 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7830 result = expand_shift (LSHIFT_EXPR, mode, result,
7831 build_int_2 (tree_log2
7832 (TREE_OPERAND
7833 (binary_op, 1)),
7834 0),
7835 (safe_from_p (temp, singleton, 1)
7836 ? temp : NULL_RTX), 0);
7837
7838 if (result)
7839 {
7840 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7841 return expand_binop (mode, boptab, op1, result, temp,
7842 unsignedp, OPTAB_LIB_WIDEN);
7843 }
7844 else if (singleton == TREE_OPERAND (exp, 1))
7845 TREE_OPERAND (exp, 0)
7846 = invert_truthvalue (TREE_OPERAND (exp, 0));
7847 }
7848
7849 do_pending_stack_adjust ();
7850 NO_DEFER_POP;
7851 op0 = gen_label_rtx ();
7852
7853 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7854 {
7855 if (temp != 0)
7856 {
7857 /* If the target conflicts with the other operand of the
7858 binary op, we can't use it. Also, we can't use the target
7859 if it is a hard register, because evaluating the condition
7860 might clobber it. */
7861 if ((binary_op
7862 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7863 || (GET_CODE (temp) == REG
7864 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7865 temp = gen_reg_rtx (mode);
7866 store_expr (singleton, temp, 0);
7867 }
7868 else
7869 expand_expr (singleton,
7870 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7871 if (singleton == TREE_OPERAND (exp, 1))
7872 jumpif (TREE_OPERAND (exp, 0), op0);
7873 else
7874 jumpifnot (TREE_OPERAND (exp, 0), op0);
7875
7876 start_cleanup_deferral ();
7877 if (binary_op && temp == 0)
7878 /* Just touch the other operand. */
7879 expand_expr (TREE_OPERAND (binary_op, 1),
7880 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7881 else if (binary_op)
7882 store_expr (build (TREE_CODE (binary_op), type,
7883 make_tree (type, temp),
7884 TREE_OPERAND (binary_op, 1)),
7885 temp, 0);
7886 else
7887 store_expr (build1 (TREE_CODE (unary_op), type,
7888 make_tree (type, temp)),
7889 temp, 0);
7890 op1 = op0;
7891 }
7892 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7893 comparison operator. If we have one of these cases, set the
7894 output to A, branch on A (cse will merge these two references),
7895 then set the output to FOO. */
7896 else if (temp
7897 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7898 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7899 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7900 TREE_OPERAND (exp, 1), 0)
7901 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7902 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7903 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7904 {
7905 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7906 temp = gen_reg_rtx (mode);
7907 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7908 jumpif (TREE_OPERAND (exp, 0), op0);
7909
7910 start_cleanup_deferral ();
7911 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7912 op1 = op0;
7913 }
7914 else if (temp
7915 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7916 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7917 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7918 TREE_OPERAND (exp, 2), 0)
7919 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7920 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7921 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7922 {
7923 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7924 temp = gen_reg_rtx (mode);
7925 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7926 jumpifnot (TREE_OPERAND (exp, 0), op0);
7927
7928 start_cleanup_deferral ();
7929 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7930 op1 = op0;
7931 }
7932 else
7933 {
7934 op1 = gen_label_rtx ();
7935 jumpifnot (TREE_OPERAND (exp, 0), op0);
7936
7937 start_cleanup_deferral ();
7938
7939 /* One branch of the cond can be void, if it never returns. For
7940 example A ? throw : E */
7941 if (temp != 0
7942 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7943 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7944 else
7945 expand_expr (TREE_OPERAND (exp, 1),
7946 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7947 end_cleanup_deferral ();
7948 emit_queue ();
7949 emit_jump_insn (gen_jump (op1));
7950 emit_barrier ();
7951 emit_label (op0);
7952 start_cleanup_deferral ();
7953 if (temp != 0
7954 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7955 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7956 else
7957 expand_expr (TREE_OPERAND (exp, 2),
7958 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7959 }
7960
7961 end_cleanup_deferral ();
7962
7963 emit_queue ();
7964 emit_label (op1);
7965 OK_DEFER_POP;
7966
7967 return temp;
7968 }
7969
7970 case TARGET_EXPR:
7971 {
7972 /* Something needs to be initialized, but we didn't know
7973 where that thing was when building the tree. For example,
7974 it could be the return value of a function, or a parameter
7975 to a function which is laid out on the stack, or a temporary
7976 variable which must be passed by reference.
7977
7978 We guarantee that the expression will either be constructed
7979 or copied into our original target. */
7980
7981 tree slot = TREE_OPERAND (exp, 0);
7982 tree cleanups = NULL_TREE;
7983 tree exp1;
7984
7985 if (TREE_CODE (slot) != VAR_DECL)
7986 abort ();
7987
7988 if (! ignore)
7989 target = original_target;
7990
7991 /* Set this here so that if we get a target that refers to a
7992 register variable that's already been used, put_reg_into_stack
7993 knows that it should fix up those uses. */
7994 TREE_USED (slot) = 1;
7995
7996 if (target == 0)
7997 {
7998 if (DECL_RTL (slot) != 0)
7999 {
8000 target = DECL_RTL (slot);
8001 /* If we have already expanded the slot, don't do
8002 it again. (mrs) */
8003 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8004 return target;
8005 }
8006 else
8007 {
8008 target = assign_temp (type, 2, 0, 1);
8009 /* All temp slots at this level must not conflict. */
8010 preserve_temp_slots (target);
8011 DECL_RTL (slot) = target;
8012 if (TREE_ADDRESSABLE (slot))
8013 {
8014 TREE_ADDRESSABLE (slot) = 0;
8015 mark_addressable (slot);
8016 }
8017
8018 /* Since SLOT is not known to the called function
8019 to belong to its stack frame, we must build an explicit
8020 cleanup. This case occurs when we must build up a reference
8021 to pass the reference as an argument. In this case,
8022 it is very likely that such a reference need not be
8023 built here. */
8024
8025 if (TREE_OPERAND (exp, 2) == 0)
8026 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8027 cleanups = TREE_OPERAND (exp, 2);
8028 }
8029 }
8030 else
8031 {
8032 /* This case does occur when expanding a parameter which
8033 needs to be constructed on the stack. The target
8034 is the actual stack address that we want to initialize.
8035 The function we call will perform the cleanup in this case. */
8036
8037 /* If we have already assigned it space, use that space,
8038 not the target that we were passed in, as our target
8039 parameter is only a hint. */
8040 if (DECL_RTL (slot) != 0)
8041 {
8042 target = DECL_RTL (slot);
8043 /* If we have already expanded the slot, don't do
8044 it again. (mrs) */
8045 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8046 return target;
8047 }
8048 else
8049 {
8050 DECL_RTL (slot) = target;
8051 /* If we must have an addressable slot, then make sure that
8052 the RTL that we just stored in slot is OK. */
8053 if (TREE_ADDRESSABLE (slot))
8054 {
8055 TREE_ADDRESSABLE (slot) = 0;
8056 mark_addressable (slot);
8057 }
8058 }
8059 }
8060
8061 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8062 /* Mark it as expanded. */
8063 TREE_OPERAND (exp, 1) = NULL_TREE;
8064
8065 store_expr (exp1, target, 0);
8066
8067 expand_decl_cleanup (NULL_TREE, cleanups);
8068
8069 return target;
8070 }
8071
8072 case INIT_EXPR:
8073 {
8074 tree lhs = TREE_OPERAND (exp, 0);
8075 tree rhs = TREE_OPERAND (exp, 1);
8076 tree noncopied_parts = 0;
8077 tree lhs_type = TREE_TYPE (lhs);
8078
8079 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8080 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8081 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8082 TYPE_NONCOPIED_PARTS (lhs_type));
8083 while (noncopied_parts != 0)
8084 {
8085 expand_assignment (TREE_VALUE (noncopied_parts),
8086 TREE_PURPOSE (noncopied_parts), 0, 0);
8087 noncopied_parts = TREE_CHAIN (noncopied_parts);
8088 }
8089 return temp;
8090 }
8091
8092 case MODIFY_EXPR:
8093 {
8094 /* If lhs is complex, expand calls in rhs before computing it.
8095 That's so we don't compute a pointer and save it over a call.
8096 If lhs is simple, compute it first so we can give it as a
8097 target if the rhs is just a call. This avoids an extra temp and copy
8098 and that prevents a partial-subsumption which makes bad code.
8099 Actually we could treat component_ref's of vars like vars. */
8100
8101 tree lhs = TREE_OPERAND (exp, 0);
8102 tree rhs = TREE_OPERAND (exp, 1);
8103 tree noncopied_parts = 0;
8104 tree lhs_type = TREE_TYPE (lhs);
8105
8106 temp = 0;
8107
8108 if (TREE_CODE (lhs) != VAR_DECL
8109 && TREE_CODE (lhs) != RESULT_DECL
8110 && TREE_CODE (lhs) != PARM_DECL
8111 && ! (TREE_CODE (lhs) == INDIRECT_REF
8112 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8113 preexpand_calls (exp);
8114
8115 /* Check for |= or &= of a bitfield of size one into another bitfield
8116 of size 1. In this case, (unless we need the result of the
8117 assignment) we can do this more efficiently with a
8118 test followed by an assignment, if necessary.
8119
8120 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8121 things change so that we do, this code should be enhanced to
8122 support it. */
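/* For example, for A.B |= C.D with one-bit fields, branch on C.D and
   store one into A.B only when C.D is set; for &=, store zero only
   when C.D is clear.  */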
8123 if (ignore
8124 && TREE_CODE (lhs) == COMPONENT_REF
8125 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8126 || TREE_CODE (rhs) == BIT_AND_EXPR)
8127 && TREE_OPERAND (rhs, 0) == lhs
8128 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8129 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
8130 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
8131 {
8132 rtx label = gen_label_rtx ();
8133
8134 do_jump (TREE_OPERAND (rhs, 1),
8135 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8136 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8137 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8138 (TREE_CODE (rhs) == BIT_IOR_EXPR
8139 ? integer_one_node
8140 : integer_zero_node)),
8141 0, 0);
8142 do_pending_stack_adjust ();
8143 emit_label (label);
8144 return const0_rtx;
8145 }
8146
8147 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8148 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8149 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8150 TYPE_NONCOPIED_PARTS (lhs_type));
8151
8152 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8153 while (noncopied_parts != 0)
8154 {
8155 expand_assignment (TREE_PURPOSE (noncopied_parts),
8156 TREE_VALUE (noncopied_parts), 0, 0);
8157 noncopied_parts = TREE_CHAIN (noncopied_parts);
8158 }
8159 return temp;
8160 }
8161
8162 case RETURN_EXPR:
8163 if (!TREE_OPERAND (exp, 0))
8164 expand_null_return ();
8165 else
8166 expand_return (TREE_OPERAND (exp, 0));
8167 return const0_rtx;
8168
8169 case PREINCREMENT_EXPR:
8170 case PREDECREMENT_EXPR:
8171 return expand_increment (exp, 0, ignore);
8172
8173 case POSTINCREMENT_EXPR:
8174 case POSTDECREMENT_EXPR:
8175 /* Faster to treat as pre-increment if result is not used. */
8176 return expand_increment (exp, ! ignore, ignore);
8177
8178 case ADDR_EXPR:
8179 /* If nonzero, TEMP will be set to the address of something that might
8180 be a MEM corresponding to a stack slot. */
8181 temp = 0;
8182
8183 /* Are we taking the address of a nested function? */
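/* If so, we cannot use the function's address directly; we return the
   address of a trampoline that loads the static chain and then jumps
   to the function.  */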
8184 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8185 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8186 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8187 && ! TREE_STATIC (exp))
8188 {
8189 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8190 op0 = force_operand (op0, target);
8191 }
8192 /* If we are taking the address of something erroneous, just
8193 return a zero. */
8194 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8195 return const0_rtx;
8196 else
8197 {
8198 /* We make sure to pass const0_rtx down if we came in with
8199 ignore set, to avoid doing the cleanups twice. */
8200 op0 = expand_expr (TREE_OPERAND (exp, 0),
8201 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8202 (modifier == EXPAND_INITIALIZER
8203 ? modifier : EXPAND_CONST_ADDRESS));
8204
8205 /* If we are going to ignore the result, OP0 will have been set
8206 to const0_rtx, so just return it. Don't get confused and
8207 think we are taking the address of the constant. */
8208 if (ignore)
8209 return op0;
8210
8211 op0 = protect_from_queue (op0, 0);
8212
8213 /* We would like the object in memory. If it is a constant, we can
8214 have it be statically allocated into memory. For a non-constant,
8215 we need to allocate some memory and store the value into it. */
8216
8217 if (CONSTANT_P (op0))
8218 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8219 op0);
8220 else if (GET_CODE (op0) == MEM)
8221 {
8222 mark_temp_addr_taken (op0);
8223 temp = XEXP (op0, 0);
8224 }
8225
8226 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8227 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8228 {
8229 /* If this object is in a register, it must not
8230 be BLKmode. */
8231 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8232 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8233
8234 mark_temp_addr_taken (memloc);
8235 emit_move_insn (memloc, op0);
8236 op0 = memloc;
8237 }
8238
8239 if (GET_CODE (op0) != MEM)
8240 abort ();
8241
8242 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8243 {
8244 temp = XEXP (op0, 0);
8245 #ifdef POINTERS_EXTEND_UNSIGNED
8246 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8247 && mode == ptr_mode)
8248 temp = convert_memory_address (ptr_mode, temp);
8249 #endif
8250 return temp;
8251 }
8252
8253 op0 = force_operand (XEXP (op0, 0), target);
8254 }
8255
8256 if (flag_force_addr && GET_CODE (op0) != REG)
8257 op0 = force_reg (Pmode, op0);
8258
8259 if (GET_CODE (op0) == REG
8260 && ! REG_USERVAR_P (op0))
8261 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8262
8263 /* If we might have had a temp slot, add an equivalent address
8264 for it. */
8265 if (temp != 0)
8266 update_temp_slot_address (temp, op0);
8267
8268 #ifdef POINTERS_EXTEND_UNSIGNED
8269 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8270 && mode == ptr_mode)
8271 op0 = convert_memory_address (ptr_mode, op0);
8272 #endif
8273
8274 return op0;
8275
8276 case ENTRY_VALUE_EXPR:
8277 abort ();
8278
8279 /* COMPLEX type for Extended Pascal & Fortran */
8280 case COMPLEX_EXPR:
8281 {
8282 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8283 rtx insns;
8284
8285 /* Expand the two operands to rtx. */
8286 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8287 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8288
8289 if (! target)
8290 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8291
8292 start_sequence ();
8293
8294 /* Move the real (op0) and imaginary (op1) parts to their location. */
8295 emit_move_insn (gen_realpart (mode, target), op0);
8296 emit_move_insn (gen_imagpart (mode, target), op1);
8297
8298 insns = get_insns ();
8299 end_sequence ();
8300
8301 /* Complex construction should appear as a single unit. */
8302 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8303 each with a separate pseudo as destination.
8304 It's not correct for flow to treat them as a unit. */
8305 if (GET_CODE (target) != CONCAT)
8306 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8307 else
8308 emit_insns (insns);
8309
8310 return target;
8311 }
8312
8313 case REALPART_EXPR:
8314 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8315 return gen_realpart (mode, op0);
8316
8317 case IMAGPART_EXPR:
8318 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8319 return gen_imagpart (mode, op0);
8320
8321 case CONJ_EXPR:
8322 {
8323 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8324 rtx imag_t;
8325 rtx insns;
8326
8327 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8328
8329 if (! target)
8330 target = gen_reg_rtx (mode);
8331
8332 start_sequence ();
8333
8334 /* Store the realpart and the negated imagpart to target. */
8335 emit_move_insn (gen_realpart (partmode, target),
8336 gen_realpart (partmode, op0));
8337
8338 imag_t = gen_imagpart (partmode, target);
8339 temp = expand_unop (partmode, neg_optab,
8340 gen_imagpart (partmode, op0), imag_t, 0);
8341 if (temp != imag_t)
8342 emit_move_insn (imag_t, temp);
8343
8344 insns = get_insns ();
8345 end_sequence ();
8346
8347 /* Conjugate should appear as a single unit.
8348 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8349 each with a separate pseudo as destination.
8350 It's not correct for flow to treat them as a unit. */
8351 if (GET_CODE (target) != CONCAT)
8352 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8353 else
8354 emit_insns (insns);
8355
8356 return target;
8357 }
8358
8359 case TRY_CATCH_EXPR:
8360 {
8361 tree handler = TREE_OPERAND (exp, 1);
8362
8363 expand_eh_region_start ();
8364
8365 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8366
8367 expand_eh_region_end (handler);
8368
8369 return op0;
8370 }
8371
8372 case TRY_FINALLY_EXPR:
8373 {
8374 tree try_block = TREE_OPERAND (exp, 0);
8375 tree finally_block = TREE_OPERAND (exp, 1);
8376 rtx finally_label = gen_label_rtx ();
8377 rtx done_label = gen_label_rtx ();
8378 rtx return_link = gen_reg_rtx (Pmode);
8379 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8380 (tree) finally_label, (tree) return_link);
8381 TREE_SIDE_EFFECTS (cleanup) = 1;
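/* The finally block is emitted once, as a subroutine: exits through the
   cleanup store a return address in RETURN_LINK and jump to FINALLY_LABEL,
   and the finally code jumps back through RETURN_LINK.  */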
8382
8383 /* Start a new binding layer that will keep track of all cleanup
8384 actions to be performed. */
8385 expand_start_bindings (2);
8386
8387 target_temp_slot_level = temp_slot_level;
8388
8389 expand_decl_cleanup (NULL_TREE, cleanup);
8390 op0 = expand_expr (try_block, target, tmode, modifier);
8391
8392 preserve_temp_slots (op0);
8393 expand_end_bindings (NULL_TREE, 0, 0);
8394 emit_jump (done_label);
8395 emit_label (finally_label);
8396 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8397 emit_indirect_jump (return_link);
8398 emit_label (done_label);
8399 return op0;
8400 }
8401
8402 case GOTO_SUBROUTINE_EXPR:
8403 {
8404 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8405 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8406 rtx return_address = gen_label_rtx ();
8407 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8408 emit_jump (subr);
8409 emit_label (return_address);
8410 return const0_rtx;
8411 }
8412
8413 case POPDCC_EXPR:
8414 {
8415 rtx dcc = get_dynamic_cleanup_chain ();
8416 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8417 return const0_rtx;
8418 }
8419
8420 case POPDHC_EXPR:
8421 {
8422 rtx dhc = get_dynamic_handler_chain ();
8423 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8424 return const0_rtx;
8425 }
8426
8427 case VA_ARG_EXPR:
8428 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8429
8430 default:
8431 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8432 }
8433
8434 /* Here to do an ordinary binary operator, generating an instruction
8435 from the optab already placed in `this_optab'. */
8436 binop:
8437 preexpand_calls (exp);
8438 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8439 subtarget = 0;
8440 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8441 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8442 binop2:
8443 temp = expand_binop (mode, this_optab, op0, op1, target,
8444 unsignedp, OPTAB_LIB_WIDEN);
8445 if (temp == 0)
8446 abort ();
8447 return temp;
8448 }
8449 \f
8450 /* Similar to expand_expr, except that we don't specify a target, target
8451 mode, or modifier and we return the alignment of the inner type. This is
8452 used in cases where it is not necessary to align the result to the
8453 alignment of its type as long as we know the alignment of the result, for
8454 example for comparisons of BLKmode values. */
8455
8456 static rtx
8457 expand_expr_unaligned (exp, palign)
8458 register tree exp;
8459 int *palign;
8460 {
8461 register rtx op0;
8462 tree type = TREE_TYPE (exp);
8463 register enum machine_mode mode = TYPE_MODE (type);
8464
8465 /* Default the alignment we return to that of the type. */
8466 *palign = TYPE_ALIGN (type);
8467
8468 /* The only case in which we do anything special is when the resulting
8469 mode is BLKmode. */
8470 if (mode != BLKmode)
8471 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8472
8473 switch (TREE_CODE (exp))
8474 {
8475 case CONVERT_EXPR:
8476 case NOP_EXPR:
8477 case NON_LVALUE_EXPR:
8478 /* Conversions between BLKmode values don't change the underlying
8479 alignment or value. */
8480 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8481 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8482 break;
8483
8484 case ARRAY_REF:
8485 /* Much of the code for this case is copied directly from expand_expr.
8486 We need to duplicate it here because we will do something different
8487 in the fall-through case, so we need to handle the same exceptions
8488 it does. */
8489 {
8490 tree array = TREE_OPERAND (exp, 0);
8491 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8492 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8493 tree index = TREE_OPERAND (exp, 1);
8494 tree index_type = TREE_TYPE (index);
8495 HOST_WIDE_INT i;
8496
8497 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8498 abort ();
8499
8500 /* Optimize the special-case of a zero lower bound.
8501
8502 We convert the low_bound to sizetype to avoid some problems
8503 with constant folding. (E.g. suppose the lower bound is 1,
8504 and its mode is QI. Without the conversion, (ARRAY
8505 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8506 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
8507
8508 But sizetype isn't quite right either (especially if
8509 the lowbound is negative). FIXME */
8510
8511 if (! integer_zerop (low_bound))
8512 index = fold (build (MINUS_EXPR, index_type, index,
8513 convert (sizetype, low_bound)));
8514
8515 /* If this is a constant index into a constant array,
8516 just get the value from the array. Handle both the cases when
8517 we have an explicit constructor and when our operand is a variable
8518 that was declared const. */
8519
8520 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
8521 {
8522 if (TREE_CODE (index) == INTEGER_CST
8523 && TREE_INT_CST_HIGH (index) == 0)
8524 {
8525 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
8526
8527 i = TREE_INT_CST_LOW (index);
8528 while (elem && i--)
8529 elem = TREE_CHAIN (elem);
8530 if (elem)
8531 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8532 palign);
8533 }
8534 }
8535
8536 else if (optimize >= 1
8537 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8538 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8539 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8540 {
8541 if (TREE_CODE (index) == INTEGER_CST)
8542 {
8543 tree init = DECL_INITIAL (array);
8544
8545 i = TREE_INT_CST_LOW (index);
8546 if (TREE_CODE (init) == CONSTRUCTOR)
8547 {
8548 tree elem = CONSTRUCTOR_ELTS (init);
8549
8550 while (elem
8551 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
8552 elem = TREE_CHAIN (elem);
8553 if (elem)
8554 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8555 palign);
8556 }
8557 }
8558 }
8559 }
8560
8561 /* ... fall through ... */
8562
8563 case COMPONENT_REF:
8564 case BIT_FIELD_REF:
8565 /* If the operand is a CONSTRUCTOR, we can just extract the
8566 appropriate field if it is present. Don't do this if we have
8567 already written the data since we want to refer to that copy
8568 and varasm.c assumes that's what we'll do. */
8569 if (TREE_CODE (exp) != ARRAY_REF
8570 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8571 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8572 {
8573 tree elt;
8574
8575 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8576 elt = TREE_CHAIN (elt))
8577 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8578 /* Note that unlike the case in expand_expr, we know this is
8579 BLKmode and hence not an integer. */
8580 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8581 }
8582
8583 {
8584 enum machine_mode mode1;
8585 int bitsize;
8586 int bitpos;
8587 tree offset;
8588 int volatilep = 0;
8589 int alignment;
8590 int unsignedp;
8591 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8592 &mode1, &unsignedp, &volatilep,
8593 &alignment);
8594
8595 /* If we got back the original object, something is wrong. Perhaps
8596 we are evaluating an expression too early. In any event, don't
8597 infinitely recurse. */
8598 if (tem == exp)
8599 abort ();
8600
8601 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8602
8603 /* If this is a constant, put it into a register if it is a
8604 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8605 if (CONSTANT_P (op0))
8606 {
8607 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8608
8609 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8610 && offset == 0)
8611 op0 = force_reg (inner_mode, op0);
8612 else
8613 op0 = validize_mem (force_const_mem (inner_mode, op0));
8614 }
8615
8616 if (offset != 0)
8617 {
8618 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8619
8620 /* If this object is in a register, put it into memory.
8621 This case can't occur in C, but can in Ada if we have
8622 unchecked conversion of an expression from a scalar type to
8623 an array or record type. */
8624 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8625 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8626 {
8627 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8628
8629 mark_temp_addr_taken (memloc);
8630 emit_move_insn (memloc, op0);
8631 op0 = memloc;
8632 }
8633
8634 if (GET_CODE (op0) != MEM)
8635 abort ();
8636
8637 if (GET_MODE (offset_rtx) != ptr_mode)
8638 {
8639 #ifdef POINTERS_EXTEND_UNSIGNED
8640 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8641 #else
8642 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8643 #endif
8644 }
8645
8646 op0 = change_address (op0, VOIDmode,
8647 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8648 force_reg (ptr_mode,
8649 offset_rtx)));
8650 }
8651
8652 /* Don't forget about volatility even if this is a bitfield. */
8653 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8654 {
8655 op0 = copy_rtx (op0);
8656 MEM_VOLATILE_P (op0) = 1;
8657 }
8658
8659 /* Check the access. */
8660 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8661 {
8662 rtx to;
8663 int size;
8664
8665 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8666 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8667
8668 /* Check the access right of the pointer. */
8669 if (size > BITS_PER_UNIT)
8670 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8671 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8672 TYPE_MODE (sizetype),
8673 GEN_INT (MEMORY_USE_RO),
8674 TYPE_MODE (integer_type_node));
8675 }
8676
8677 /* In cases where an aligned union has an unaligned object
8678 as a field, we might be extracting a BLKmode value from
8679 an integer-mode (e.g., SImode) object. Handle this case
8680 by doing the extract into an object as wide as the field
8681 (which we know to be the width of a basic mode), then
8682 storing into memory, and changing the mode to BLKmode.
8683 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8684 EXPAND_INITIALIZER), then we must not copy to a temporary. */
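/* Schematically: if a union is held in an SImode register and one of its
   fields is a BLKmode record whose width happens to equal that of a basic
   integer mode such as HImode, we extract those bits into an HImode
   temporary, store the temporary into a stack slot, and hand back the
   slot with its mode changed to BLKmode.  */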
8685 if (mode1 == VOIDmode
8686 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8687 || (SLOW_UNALIGNED_ACCESS
8688 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8689 || bitpos % TYPE_ALIGN (type) != 0)))
8690 {
8691 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8692
8693 if (ext_mode == BLKmode)
8694 {
8695 /* In this case, BITPOS must start at a byte boundary. */
8696 if (GET_CODE (op0) != MEM
8697 || bitpos % BITS_PER_UNIT != 0)
8698 abort ();
8699
8700 op0 = change_address (op0, VOIDmode,
8701 plus_constant (XEXP (op0, 0),
8702 bitpos / BITS_PER_UNIT));
8703 }
8704 else
8705 {
8706 rtx new = assign_stack_temp (ext_mode,
8707 bitsize / BITS_PER_UNIT, 0);
8708
8709 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8710 unsignedp, NULL_RTX, ext_mode,
8711 ext_mode, alignment,
8712 int_size_in_bytes (TREE_TYPE (tem)));
8713
8714 /* If the result is a record type and BITSIZE is narrower than
8715 the mode of OP0, an integral mode, and this is a big endian
8716 machine, we must put the field into the high-order bits. */
8717 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8718 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8719 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8720 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8721 size_int (GET_MODE_BITSIZE
8722 (GET_MODE (op0))
8723 - bitsize),
8724 op0, 1);
8725
8726
8727 emit_move_insn (new, op0);
8728 op0 = copy_rtx (new);
8729 PUT_MODE (op0, BLKmode);
8730 }
8731 }
8732 else
8733 /* Get a reference to just this component. */
8734 op0 = change_address (op0, mode1,
8735 plus_constant (XEXP (op0, 0),
8736 (bitpos / BITS_PER_UNIT)));
8737
8738 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8739
8740 /* Adjust the alignment in case the bit position is not
8741 a multiple of the alignment of the inner object. */
8742 while (bitpos % alignment != 0)
8743 alignment >>= 1;
8744
8745 if (GET_CODE (XEXP (op0, 0)) == REG)
8746 mark_reg_pointer (XEXP (op0, 0), alignment);
8747
8748 MEM_IN_STRUCT_P (op0) = 1;
8749 MEM_VOLATILE_P (op0) |= volatilep;
8750
8751 *palign = alignment;
8752 return op0;
8753 }
8754
8755 default:
8756 break;
8757
8758 }
8759
8760 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8761 }
8762 \f
8763 /* Return the tree node and offset if a given argument corresponds to
8764 a string constant. */
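/* For example, for the C expression `"hello" + 2' the front end builds a
   PLUS_EXPR whose first operand is an ADDR_EXPR of the STRING_CST "hello";
   we return that STRING_CST and set *PTR_OFFSET to the tree for 2.  A plain
   `"hello"' argument (an ADDR_EXPR of a STRING_CST) yields an offset of
   integer_zero_node.  Anything else returns 0.  */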
8765
8766 tree
8767 string_constant (arg, ptr_offset)
8768 tree arg;
8769 tree *ptr_offset;
8770 {
8771 STRIP_NOPS (arg);
8772
8773 if (TREE_CODE (arg) == ADDR_EXPR
8774 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8775 {
8776 *ptr_offset = integer_zero_node;
8777 return TREE_OPERAND (arg, 0);
8778 }
8779 else if (TREE_CODE (arg) == PLUS_EXPR)
8780 {
8781 tree arg0 = TREE_OPERAND (arg, 0);
8782 tree arg1 = TREE_OPERAND (arg, 1);
8783
8784 STRIP_NOPS (arg0);
8785 STRIP_NOPS (arg1);
8786
8787 if (TREE_CODE (arg0) == ADDR_EXPR
8788 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8789 {
8790 *ptr_offset = arg1;
8791 return TREE_OPERAND (arg0, 0);
8792 }
8793 else if (TREE_CODE (arg1) == ADDR_EXPR
8794 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8795 {
8796 *ptr_offset = arg0;
8797 return TREE_OPERAND (arg1, 0);
8798 }
8799 }
8800
8801 return 0;
8802 }
8803 \f
8804 /* Expand code for a post- or pre- increment or decrement
8805 and return the RTX for the result.
8806 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
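/* For example, for `y = x++;' (POST == 1) the rtx returned holds the old
   value of `x' while the addition itself is queued or emitted separately;
   for `y = ++x;' (POST == 0) the rtx returned holds the already-incremented
   value.  IGNORE nonzero means the value of the expression is not needed,
   only its side effect.  */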
8807
8808 static rtx
8809 expand_increment (exp, post, ignore)
8810 register tree exp;
8811 int post, ignore;
8812 {
8813 register rtx op0, op1;
8814 register rtx temp, value;
8815 register tree incremented = TREE_OPERAND (exp, 0);
8816 optab this_optab = add_optab;
8817 int icode;
8818 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8819 int op0_is_copy = 0;
8820 int single_insn = 0;
8821 /* 1 means we can't store into OP0 directly,
8822 because it is a subreg narrower than a word,
8823 and we don't dare clobber the rest of the word. */
8824 int bad_subreg = 0;
8825
8826 /* Stabilize any component ref that might need to be
8827 evaluated more than once below. */
8828 if (!post
8829 || TREE_CODE (incremented) == BIT_FIELD_REF
8830 || (TREE_CODE (incremented) == COMPONENT_REF
8831 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8832 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8833 incremented = stabilize_reference (incremented);
8834 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8835 ones into save exprs so that they don't accidentally get evaluated
8836 more than once by the code below. */
8837 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8838 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8839 incremented = save_expr (incremented);
8840
8841 /* Compute the operands as RTX.
8842 Note whether OP0 is the actual lvalue or a copy of it:
8843 I believe it is a copy iff it is a register or subreg
8844 and insns were generated in computing it. */
8845
8846 temp = get_last_insn ();
8847 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8848
8849 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8850 in place but instead must do sign- or zero-extension during assignment,
8851 so we copy it into a new register and let the code below use it as
8852 a copy.
8853
8854 Note that we can safely modify this SUBREG since it is known not to be
8855 shared (it was made by the expand_expr call above). */
8856
8857 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8858 {
8859 if (post)
8860 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8861 else
8862 bad_subreg = 1;
8863 }
8864 else if (GET_CODE (op0) == SUBREG
8865 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8866 {
8867 /* We cannot increment this SUBREG in place. If we are
8868 post-incrementing, get a copy of the old value. Otherwise,
8869 just mark that we cannot increment in place. */
8870 if (post)
8871 op0 = copy_to_reg (op0);
8872 else
8873 bad_subreg = 1;
8874 }
8875
8876 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8877 && temp != get_last_insn ());
8878 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8879 EXPAND_MEMORY_USE_BAD);
8880
8881 /* Decide whether incrementing or decrementing. */
8882 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8883 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8884 this_optab = sub_optab;
8885
8886 /* Convert decrement by a constant into a negative increment. */
8887 if (this_optab == sub_optab
8888 && GET_CODE (op1) == CONST_INT)
8889 {
8890 op1 = GEN_INT (- INTVAL (op1));
8891 this_optab = add_optab;
8892 }
8893
8894 /* For a preincrement, see if we can do this with a single instruction. */
8895 if (!post)
8896 {
8897 icode = (int) this_optab->handlers[(int) mode].insn_code;
8898 if (icode != (int) CODE_FOR_nothing
8899 /* Make sure that OP0 is valid for operands 0 and 1
8900 of the insn we want to queue. */
8901 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8902 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8903 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8904 single_insn = 1;
8905 }
8906
8907 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8908 then we cannot just increment OP0. We must therefore contrive to
8909 increment the original value. Then, for postincrement, we can return
8910 OP0 since it is a copy of the old value. For preincrement, expand here
8911 unless we can do it with a single insn.
8912
8913 Likewise if storing directly into OP0 would clobber high bits
8914 we need to preserve (bad_subreg). */
8915 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8916 {
8917 /* This is the easiest way to increment the value wherever it is.
8918 Problems with multiple evaluation of INCREMENTED are prevented
8919 because either (1) it is a component_ref or preincrement,
8920 in which case it was stabilized above, or (2) it is an array_ref
8921 with constant index in an array in a register, which is
8922 safe to reevaluate. */
8923 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8924 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8925 ? MINUS_EXPR : PLUS_EXPR),
8926 TREE_TYPE (exp),
8927 incremented,
8928 TREE_OPERAND (exp, 1));
8929
8930 while (TREE_CODE (incremented) == NOP_EXPR
8931 || TREE_CODE (incremented) == CONVERT_EXPR)
8932 {
8933 newexp = convert (TREE_TYPE (incremented), newexp);
8934 incremented = TREE_OPERAND (incremented, 0);
8935 }
8936
8937 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8938 return post ? op0 : temp;
8939 }
8940
8941 if (post)
8942 {
8943 /* We have a true reference to the value in OP0.
8944 If there is an insn to add or subtract in this mode, queue it.
8945 Queueing the increment insn avoids the register shuffling
8946 that often results if we must increment now and first save
8947 the old value for subsequent use. */
8948
8949 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8950 op0 = stabilize (op0);
8951 #endif
8952
8953 icode = (int) this_optab->handlers[(int) mode].insn_code;
8954 if (icode != (int) CODE_FOR_nothing
8955 /* Make sure that OP0 is valid for operands 0 and 1
8956 of the insn we want to queue. */
8957 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8958 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8959 {
8960 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8961 op1 = force_reg (mode, op1);
8962
8963 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8964 }
8965 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8966 {
8967 rtx addr = (general_operand (XEXP (op0, 0), mode)
8968 ? force_reg (Pmode, XEXP (op0, 0))
8969 : copy_to_reg (XEXP (op0, 0)));
8970 rtx temp, result;
8971
8972 op0 = change_address (op0, VOIDmode, addr);
8973 temp = force_reg (GET_MODE (op0), op0);
8974 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8975 op1 = force_reg (mode, op1);
8976
8977 /* The increment queue is LIFO, thus we have to `queue'
8978 the instructions in reverse order. */
8979 enqueue_insn (op0, gen_move_insn (op0, temp));
8980 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8981 return result;
8982 }
8983 }
8984
8985 /* Preincrement, or we can't increment with one simple insn. */
8986 if (post)
8987 /* Save a copy of the value before inc or dec, to return it later. */
8988 temp = value = copy_to_reg (op0);
8989 else
8990 /* Arrange to return the incremented value. */
8991 /* Copy the rtx because expand_binop will protect from the queue,
8992 and the results of that would be invalid for us to return
8993 if our caller does emit_queue before using our result. */
8994 temp = copy_rtx (value = op0);
8995
8996 /* Increment however we can. */
8997 op1 = expand_binop (mode, this_optab, value, op1,
8998 current_function_check_memory_usage ? NULL_RTX : op0,
8999 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9000 /* Make sure the value is stored into OP0. */
9001 if (op1 != op0)
9002 emit_move_insn (op0, op1);
9003
9004 return temp;
9005 }
9006 \f
9007 /* Expand all function calls contained within EXP, innermost ones first.
9008 But don't look within expressions that have sequence points.
9009 For each CALL_EXPR, record the rtx for its value
9010 in the CALL_EXPR_RTL field. */
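/* For example, given `a + f (b)', the CALL_EXPR for `f (b)' is expanded
   here and its result rtx recorded in CALL_EXPR_RTL; when expand_expr later
   reaches that CALL_EXPR it reuses the recorded rtx instead of expanding
   the call in the middle of evaluating the addition.  */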
9011
9012 static void
9013 preexpand_calls (exp)
9014 tree exp;
9015 {
9016 register int nops, i;
9017 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9018
9019 if (! do_preexpand_calls)
9020 return;
9021
9022 /* Only expressions and references can contain calls. */
9023
9024 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9025 return;
9026
9027 switch (TREE_CODE (exp))
9028 {
9029 case CALL_EXPR:
9030 /* Do nothing if already expanded. */
9031 if (CALL_EXPR_RTL (exp) != 0
9032 /* Do nothing if the call returns a variable-sized object. */
9033 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9034 /* Do nothing to built-in functions. */
9035 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9036 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9037 == FUNCTION_DECL)
9038 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9039 return;
9040
9041 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9042 return;
9043
9044 case COMPOUND_EXPR:
9045 case COND_EXPR:
9046 case TRUTH_ANDIF_EXPR:
9047 case TRUTH_ORIF_EXPR:
9048 /* If we find one of these, then we can be sure
9049 the adjust will be done for it (since it makes jumps).
9050 Do it now, so that if this is inside an argument
9051 of a function, we don't get the stack adjustment
9052 after some other args have already been pushed. */
9053 do_pending_stack_adjust ();
9054 return;
9055
9056 case BLOCK:
9057 case RTL_EXPR:
9058 case WITH_CLEANUP_EXPR:
9059 case CLEANUP_POINT_EXPR:
9060 case TRY_CATCH_EXPR:
9061 return;
9062
9063 case SAVE_EXPR:
9064 if (SAVE_EXPR_RTL (exp) != 0)
9065 return;
9066
9067 default:
9068 break;
9069 }
9070
9071 nops = tree_code_length[(int) TREE_CODE (exp)];
9072 for (i = 0; i < nops; i++)
9073 if (TREE_OPERAND (exp, i) != 0)
9074 {
9075 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9076 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9077 It doesn't happen before the call is made. */
9078 ;
9079 else
9080 {
9081 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9082 if (type == 'e' || type == '<' || type == '1' || type == '2'
9083 || type == 'r')
9084 preexpand_calls (TREE_OPERAND (exp, i));
9085 }
9086 }
9087 }
9088 \f
9089 /* At the start of a function, record that we have no previously-pushed
9090 arguments waiting to be popped. */
9091
9092 void
9093 init_pending_stack_adjust ()
9094 {
9095 pending_stack_adjust = 0;
9096 }
9097
9098 /* When exiting from function, if safe, clear out any pending stack adjust
9099 so the adjustment won't get done.
9100
9101 Note, if the current function calls alloca, then it must have a
9102 frame pointer regardless of the value of flag_omit_frame_pointer. */
9103
9104 void
9105 clear_pending_stack_adjust ()
9106 {
9107 #ifdef EXIT_IGNORE_STACK
9108 if (optimize > 0
9109 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9110 && EXIT_IGNORE_STACK
9111 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9112 && ! flag_inline_functions)
9113 pending_stack_adjust = 0;
9114 #endif
9115 }
9116
9117 /* Pop any previously-pushed arguments that have not been popped yet. */
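/* For example, if two consecutive calls each pushed 8 bytes of arguments,
   the pops are not emitted after each call; pending_stack_adjust instead
   accumulates to 16 and (when inhibit_defer_pop is zero) a single
   adjust_stack of 16 bytes is emitted here, combining what would otherwise
   be two separate stack adjustments.  */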
9118
9119 void
9120 do_pending_stack_adjust ()
9121 {
9122 if (inhibit_defer_pop == 0)
9123 {
9124 if (pending_stack_adjust != 0)
9125 adjust_stack (GEN_INT (pending_stack_adjust));
9126 pending_stack_adjust = 0;
9127 }
9128 }
9129 \f
9130 /* Expand conditional expressions. */
9131
9132 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9133 LABEL is an rtx of code CODE_LABEL, in this function and all the
9134 functions here. */
9135
9136 void
9137 jumpifnot (exp, label)
9138 tree exp;
9139 rtx label;
9140 {
9141 do_jump (exp, label, NULL_RTX);
9142 }
9143
9144 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9145
9146 void
9147 jumpif (exp, label)
9148 tree exp;
9149 rtx label;
9150 {
9151 do_jump (exp, NULL_RTX, label);
9152 }
9153
9154 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9155 the result is zero, or IF_TRUE_LABEL if the result is one.
9156 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9157 meaning fall through in that case.
9158
9159 do_jump always does any pending stack adjust except when it does not
9160 actually perform a jump. An example where there is no jump
9161 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9162
9163 This function is responsible for optimizing cases such as
9164 &&, || and comparison operators in EXP. */
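/* For example, for `if (a && b) f ();' the front end calls jumpifnot on the
   TRUTH_ANDIF_EXPR with the statement's "else" label: we jump to that label
   if `a' is zero, then jump to it if `b' is zero, and otherwise fall through
   to the call of `f'.  A comparison such as `x < y' is emitted directly as
   a compare-and-branch rather than by computing a 0/1 value and testing it.  */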
9165
9166 void
9167 do_jump (exp, if_false_label, if_true_label)
9168 tree exp;
9169 rtx if_false_label, if_true_label;
9170 {
9171 register enum tree_code code = TREE_CODE (exp);
9172 /* Some cases need to create a label to jump to
9173 in order to properly fall through.
9174 These cases set DROP_THROUGH_LABEL nonzero. */
9175 rtx drop_through_label = 0;
9176 rtx temp;
9177 int i;
9178 tree type;
9179 enum machine_mode mode;
9180
9181 #ifdef MAX_INTEGER_COMPUTATION_MODE
9182 check_max_integer_computation_mode (exp);
9183 #endif
9184
9185 emit_queue ();
9186
9187 switch (code)
9188 {
9189 case ERROR_MARK:
9190 break;
9191
9192 case INTEGER_CST:
9193 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9194 if (temp)
9195 emit_jump (temp);
9196 break;
9197
9198 #if 0
9199 /* This is not true with #pragma weak */
9200 case ADDR_EXPR:
9201 /* The address of something can never be zero. */
9202 if (if_true_label)
9203 emit_jump (if_true_label);
9204 break;
9205 #endif
9206
9207 case NOP_EXPR:
9208 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9209 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9210 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9211 goto normal;
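/* ... fall through ... */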
9212 case CONVERT_EXPR:
9213 /* If we are narrowing the operand, we have to do the compare in the
9214 narrower mode. */
9215 if ((TYPE_PRECISION (TREE_TYPE (exp))
9216 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9217 goto normal;
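/* ... fall through ... */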
9218 case NON_LVALUE_EXPR:
9219 case REFERENCE_EXPR:
9220 case ABS_EXPR:
9221 case NEGATE_EXPR:
9222 case LROTATE_EXPR:
9223 case RROTATE_EXPR:
9224 /* These cannot change zero->non-zero or vice versa. */
9225 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9226 break;
9227
9228 case WITH_RECORD_EXPR:
9229 /* Put the object on the placeholder list, recurse through our first
9230 operand, and pop the list. */
9231 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9232 placeholder_list);
9233 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9234 placeholder_list = TREE_CHAIN (placeholder_list);
9235 break;
9236
9237 #if 0
9238 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9239 a test and can be longer if the test is eliminated. */
9240 case PLUS_EXPR:
9241 /* Reduce to minus. */
9242 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9243 TREE_OPERAND (exp, 0),
9244 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9245 TREE_OPERAND (exp, 1))));
9246 /* Process as MINUS. */
9247 #endif
9248
9249 case MINUS_EXPR:
9250 /* Non-zero iff operands of minus differ. */
9251 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9252 TREE_OPERAND (exp, 0),
9253 TREE_OPERAND (exp, 1)),
9254 NE, NE, if_false_label, if_true_label);
9255 break;
9256
9257 case BIT_AND_EXPR:
9258 /* If we are AND'ing with a small constant, do this comparison in the
9259 smallest type that fits. If the machine doesn't have comparisons
9260 that small, it will be converted back to the wider comparison.
9261 This helps if we are testing the sign bit of a narrower object.
9262 combine can't do this for us because it can't know whether a
9263 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
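/* For example, `if (x & 0x80)' with `x' an int: 0x80 fits in 8 bits, so,
   provided the machine can compare QImode values, we convert the
   BIT_AND_EXPR to an 8-bit unsigned type and compare that against zero,
   in effect testing the sign bit of the low byte.  */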
9264
9265 if (! SLOW_BYTE_ACCESS
9266 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9267 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9268 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9269 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9270 && (type = type_for_mode (mode, 1)) != 0
9271 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9272 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9273 != CODE_FOR_nothing))
9274 {
9275 do_jump (convert (type, exp), if_false_label, if_true_label);
9276 break;
9277 }
9278 goto normal;
9279
9280 case TRUTH_NOT_EXPR:
9281 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9282 break;
9283
9284 case TRUTH_ANDIF_EXPR:
9285 if (if_false_label == 0)
9286 if_false_label = drop_through_label = gen_label_rtx ();
9287 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9288 start_cleanup_deferral ();
9289 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9290 end_cleanup_deferral ();
9291 break;
9292
9293 case TRUTH_ORIF_EXPR:
9294 if (if_true_label == 0)
9295 if_true_label = drop_through_label = gen_label_rtx ();
9296 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9297 start_cleanup_deferral ();
9298 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9299 end_cleanup_deferral ();
9300 break;
9301
9302 case COMPOUND_EXPR:
9303 push_temp_slots ();
9304 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9305 preserve_temp_slots (NULL_RTX);
9306 free_temp_slots ();
9307 pop_temp_slots ();
9308 emit_queue ();
9309 do_pending_stack_adjust ();
9310 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9311 break;
9312
9313 case COMPONENT_REF:
9314 case BIT_FIELD_REF:
9315 case ARRAY_REF:
9316 {
9317 int bitsize, bitpos, unsignedp;
9318 enum machine_mode mode;
9319 tree type;
9320 tree offset;
9321 int volatilep = 0;
9322 int alignment;
9323
9324 /* Get description of this reference. We don't actually care
9325 about the underlying object here. */
9326 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9327 &mode, &unsignedp, &volatilep,
9328 &alignment);
9329
9330 type = type_for_size (bitsize, unsignedp);
9331 if (! SLOW_BYTE_ACCESS
9332 && type != 0 && bitsize >= 0
9333 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9334 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9335 != CODE_FOR_nothing))
9336 {
9337 do_jump (convert (type, exp), if_false_label, if_true_label);
9338 break;
9339 }
9340 goto normal;
9341 }
9342
9343 case COND_EXPR:
9344 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9345 if (integer_onep (TREE_OPERAND (exp, 1))
9346 && integer_zerop (TREE_OPERAND (exp, 2)))
9347 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9348
9349 else if (integer_zerop (TREE_OPERAND (exp, 1))
9350 && integer_onep (TREE_OPERAND (exp, 2)))
9351 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9352
9353 else
9354 {
9355 register rtx label1 = gen_label_rtx ();
9356 drop_through_label = gen_label_rtx ();
9357
9358 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9359
9360 start_cleanup_deferral ();
9361 /* Now the THEN-expression. */
9362 do_jump (TREE_OPERAND (exp, 1),
9363 if_false_label ? if_false_label : drop_through_label,
9364 if_true_label ? if_true_label : drop_through_label);
9365 /* In case the do_jump just above never jumps. */
9366 do_pending_stack_adjust ();
9367 emit_label (label1);
9368
9369 /* Now the ELSE-expression. */
9370 do_jump (TREE_OPERAND (exp, 2),
9371 if_false_label ? if_false_label : drop_through_label,
9372 if_true_label ? if_true_label : drop_through_label);
9373 end_cleanup_deferral ();
9374 }
9375 break;
9376
9377 case EQ_EXPR:
9378 {
9379 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9380
9381 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9382 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9383 {
9384 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9385 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9386 do_jump
9387 (fold
9388 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9389 fold (build (EQ_EXPR, TREE_TYPE (exp),
9390 fold (build1 (REALPART_EXPR,
9391 TREE_TYPE (inner_type),
9392 exp0)),
9393 fold (build1 (REALPART_EXPR,
9394 TREE_TYPE (inner_type),
9395 exp1)))),
9396 fold (build (EQ_EXPR, TREE_TYPE (exp),
9397 fold (build1 (IMAGPART_EXPR,
9398 TREE_TYPE (inner_type),
9399 exp0)),
9400 fold (build1 (IMAGPART_EXPR,
9401 TREE_TYPE (inner_type),
9402 exp1)))))),
9403 if_false_label, if_true_label);
9404 }
9405
9406 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9407 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9408
9409 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9410 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
9411 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9412 else
9413 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9414 break;
9415 }
9416
9417 case NE_EXPR:
9418 {
9419 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9420
9421 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9422 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9423 {
9424 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9425 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9426 do_jump
9427 (fold
9428 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9429 fold (build (NE_EXPR, TREE_TYPE (exp),
9430 fold (build1 (REALPART_EXPR,
9431 TREE_TYPE (inner_type),
9432 exp0)),
9433 fold (build1 (REALPART_EXPR,
9434 TREE_TYPE (inner_type),
9435 exp1)))),
9436 fold (build (NE_EXPR, TREE_TYPE (exp),
9437 fold (build1 (IMAGPART_EXPR,
9438 TREE_TYPE (inner_type),
9439 exp0)),
9440 fold (build1 (IMAGPART_EXPR,
9441 TREE_TYPE (inner_type),
9442 exp1)))))),
9443 if_false_label, if_true_label);
9444 }
9445
9446 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9447 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9448
9449 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9450 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
9451 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9452 else
9453 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9454 break;
9455 }
9456
9457 case LT_EXPR:
9458 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9459 if (GET_MODE_CLASS (mode) == MODE_INT
9460 && ! can_compare_p (mode, ccp_jump))
9461 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9462 else
9463 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9464 break;
9465
9466 case LE_EXPR:
9467 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9468 if (GET_MODE_CLASS (mode) == MODE_INT
9469 && ! can_compare_p (mode, ccp_jump))
9470 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9471 else
9472 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9473 break;
9474
9475 case GT_EXPR:
9476 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9477 if (GET_MODE_CLASS (mode) == MODE_INT
9478 && ! can_compare_p (mode, ccp_jump))
9479 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9480 else
9481 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9482 break;
9483
9484 case GE_EXPR:
9485 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9486 if (GET_MODE_CLASS (mode) == MODE_INT
9487 && ! can_compare_p (mode, ccp_jump))
9488 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9489 else
9490 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9491 break;
9492
9493 default:
9494 normal:
9495 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9496 #if 0
9497 /* This is not needed any more and causes poor code since it causes
9498 comparisons and tests from non-SI objects to have different code
9499 sequences. */
9500 /* Copy to register to avoid generating bad insns by cse
9501 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9502 if (!cse_not_expected && GET_CODE (temp) == MEM)
9503 temp = copy_to_reg (temp);
9504 #endif
9505 do_pending_stack_adjust ();
9506 /* Do any postincrements in the expression that was tested. */
9507 emit_queue ();
9508
9509 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9510 {
9511 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9512 if (target)
9513 emit_jump (target);
9514 }
9515 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9516 && ! can_compare_p (GET_MODE (temp), ccp_jump))
9517 /* Note swapping the labels gives us not-equal. */
9518 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9519 else if (GET_MODE (temp) != VOIDmode)
9520 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9521 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9522 GET_MODE (temp), NULL_RTX, 0,
9523 if_false_label, if_true_label);
9524 else
9525 abort ();
9526 }
9527
9528 if (drop_through_label)
9529 {
9530 /* If do_jump produces code that might be jumped around,
9531 do any stack adjusts from that code, before the place
9532 where control merges in. */
9533 do_pending_stack_adjust ();
9534 emit_label (drop_through_label);
9535 }
9536 }
9537 \f
9538 /* Given a comparison expression EXP for values too wide to be compared
9539 with one insn, test the comparison and jump to the appropriate label.
9540 The code of EXP is ignored; we always test GT if SWAP is 0,
9541 and LT if SWAP is 1. */
9542
9543 static void
9544 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9545 tree exp;
9546 int swap;
9547 rtx if_false_label, if_true_label;
9548 {
9549 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9550 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9551 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9552 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9553
9554 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9555 }
9556
9557 /* Compare OP0 with OP1, word at a time, in mode MODE.
9558 UNSIGNEDP says to do unsigned comparison.
9559 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
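/* For example, for two DImode values on a 32-bit target the high-order
   words are compared first: if OP0's high word is greater we jump to
   IF_TRUE_LABEL, and if the high words differ at all (so OP0's must be
   smaller) we jump to IF_FALSE_LABEL.  Only when they are equal do we go
   on to the low-order words, which are always compared unsigned; if every
   word is equal we end up at IF_FALSE_LABEL.  */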
9560
9561 void
9562 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9563 enum machine_mode mode;
9564 int unsignedp;
9565 rtx op0, op1;
9566 rtx if_false_label, if_true_label;
9567 {
9568 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9569 rtx drop_through_label = 0;
9570 int i;
9571
9572 if (! if_true_label || ! if_false_label)
9573 drop_through_label = gen_label_rtx ();
9574 if (! if_true_label)
9575 if_true_label = drop_through_label;
9576 if (! if_false_label)
9577 if_false_label = drop_through_label;
9578
9579 /* Compare a word at a time, high order first. */
9580 for (i = 0; i < nwords; i++)
9581 {
9582 rtx op0_word, op1_word;
9583
9584 if (WORDS_BIG_ENDIAN)
9585 {
9586 op0_word = operand_subword_force (op0, i, mode);
9587 op1_word = operand_subword_force (op1, i, mode);
9588 }
9589 else
9590 {
9591 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9592 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9593 }
9594
9595 /* All but high-order word must be compared as unsigned. */
9596 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9597 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9598 NULL_RTX, if_true_label);
9599
9600 /* Consider lower words only if these are equal. */
9601 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9602 NULL_RTX, 0, NULL_RTX, if_false_label);
9603 }
9604
9605 if (if_false_label)
9606 emit_jump (if_false_label);
9607 if (drop_through_label)
9608 emit_label (drop_through_label);
9609 }
9610
9611 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9612 with one insn, test the comparison and jump to the appropriate label. */
9613
9614 static void
9615 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9616 tree exp;
9617 rtx if_false_label, if_true_label;
9618 {
9619 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9620 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9621 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9622 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9623 int i;
9624 rtx drop_through_label = 0;
9625
9626 if (! if_false_label)
9627 drop_through_label = if_false_label = gen_label_rtx ();
9628
9629 for (i = 0; i < nwords; i++)
9630 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9631 operand_subword_force (op1, i, mode),
9632 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9633 word_mode, NULL_RTX, 0, if_false_label,
9634 NULL_RTX);
9635
9636 if (if_true_label)
9637 emit_jump (if_true_label);
9638 if (drop_through_label)
9639 emit_label (drop_through_label);
9640 }
9641 \f
9642 /* Jump according to whether OP0 is 0.
9643 We assume that OP0 has an integer mode that is too wide
9644 for the available compare insns. */
9645
9646 void
9647 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9648 rtx op0;
9649 rtx if_false_label, if_true_label;
9650 {
9651 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9652 rtx part;
9653 int i;
9654 rtx drop_through_label = 0;
9655
9656 /* The fastest way of doing this comparison on almost any machine is to
9657 "or" all the words and compare the result. If all have to be loaded
9658 from memory and this is a very wide item, this may be
9659 slower, but that's highly unlikely. */
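/* For example, to test a DImode value on a 32-bit target, rather than
   comparing the low and high SImode words against zero separately, we
   compute `low | high' into a word_mode register and branch on a single
   comparison of that result with zero.  */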
9660
9661 part = gen_reg_rtx (word_mode);
9662 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9663 for (i = 1; i < nwords && part != 0; i++)
9664 part = expand_binop (word_mode, ior_optab, part,
9665 operand_subword_force (op0, i, GET_MODE (op0)),
9666 part, 1, OPTAB_WIDEN);
9667
9668 if (part != 0)
9669 {
9670 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9671 NULL_RTX, 0, if_false_label, if_true_label);
9672
9673 return;
9674 }
9675
9676 /* If we couldn't do the "or" simply, do this with a series of compares. */
9677 if (! if_false_label)
9678 drop_through_label = if_false_label = gen_label_rtx ();
9679
9680 for (i = 0; i < nwords; i++)
9681 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9682 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9683 if_false_label, NULL_RTX);
9684
9685 if (if_true_label)
9686 emit_jump (if_true_label);
9687
9688 if (drop_through_label)
9689 emit_label (drop_through_label);
9690 }
9691 \f
9692 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9693 (including code to compute the values to be compared)
9694 and set (CC0) according to the result.
9695 The decision as to signed or unsigned comparison must be made by the caller.
9696
9697 We force a stack adjustment unless there are currently
9698 things pushed on the stack that aren't yet used.
9699
9700 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9701 compared.
9702
9703 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9704 size of MODE should be used. */
9705
9706 rtx
9707 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9708 register rtx op0, op1;
9709 enum rtx_code code;
9710 int unsignedp;
9711 enum machine_mode mode;
9712 rtx size;
9713 int align;
9714 {
9715 rtx tem;
9716
9717 /* If one operand is constant, make it the second one. Only do this
9718 if the other operand is not constant as well. */
9719
9720 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9721 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9722 {
9723 tem = op0;
9724 op0 = op1;
9725 op1 = tem;
9726 code = swap_condition (code);
9727 }
9728
9729 if (flag_force_mem)
9730 {
9731 op0 = force_not_mem (op0);
9732 op1 = force_not_mem (op1);
9733 }
9734
9735 do_pending_stack_adjust ();
9736
9737 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9738 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9739 return tem;
9740
9741 #if 0
9742 /* There's no need to do this now that combine.c can eliminate lots of
9743 sign extensions. This can be less efficient in certain cases on other
9744 machines. */
9745
9746 /* If this is a signed equality comparison, we can do it as an
9747 unsigned comparison since zero-extension is cheaper than sign
9748 extension and comparisons with zero are done as unsigned. This is
9749 the case even on machines that can do fast sign extension, since
9750 zero-extension is easier to combine with other operations than
9751 sign-extension is. If we are comparing against a constant, we must
9752 convert it to what it would look like unsigned. */
9753 if ((code == EQ || code == NE) && ! unsignedp
9754 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9755 {
9756 if (GET_CODE (op1) == CONST_INT
9757 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9758 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9759 unsignedp = 1;
9760 }
9761 #endif
9762
9763 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9764
9765 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9766 }
9767
9768 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9769 The decision as to signed or unsigned comparison must be made by the caller.
9770
9771 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9772 compared.
9773
9774 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9775 size of MODE should be used. */
9776
9777 void
9778 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9779 if_false_label, if_true_label)
9780 register rtx op0, op1;
9781 enum rtx_code code;
9782 int unsignedp;
9783 enum machine_mode mode;
9784 rtx size;
9785 int align;
9786 rtx if_false_label, if_true_label;
9787 {
9788 rtx tem;
9789 int dummy_true_label = 0;
9790
9791 /* Reverse the comparison if that is safe and we want to jump if it is
9792 false. */
9793 if (! if_true_label && ! FLOAT_MODE_P (mode))
9794 {
9795 if_true_label = if_false_label;
9796 if_false_label = 0;
9797 code = reverse_condition (code);
9798 }
9799
9800 /* If one operand is constant, make it the second one. Only do this
9801 if the other operand is not constant as well. */
9802
9803 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9804 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9805 {
9806 tem = op0;
9807 op0 = op1;
9808 op1 = tem;
9809 code = swap_condition (code);
9810 }
9811
9812 if (flag_force_mem)
9813 {
9814 op0 = force_not_mem (op0);
9815 op1 = force_not_mem (op1);
9816 }
9817
9818 do_pending_stack_adjust ();
9819
9820 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9821 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9822 {
9823 if (tem == const_true_rtx)
9824 {
9825 if (if_true_label)
9826 emit_jump (if_true_label);
9827 }
9828 else
9829 {
9830 if (if_false_label)
9831 emit_jump (if_false_label);
9832 }
9833 return;
9834 }
9835
9836 #if 0
9837 /* There's no need to do this now that combine.c can eliminate lots of
9838 sign extensions. This can be less efficient in certain cases on other
9839 machines. */
9840
9841 /* If this is a signed equality comparison, we can do it as an
9842 unsigned comparison since zero-extension is cheaper than sign
9843 extension and comparisons with zero are done as unsigned. This is
9844 the case even on machines that can do fast sign extension, since
9845 zero-extension is easier to combine with other operations than
9846 sign-extension is. If we are comparing against a constant, we must
9847 convert it to what it would look like unsigned. */
9848 if ((code == EQ || code == NE) && ! unsignedp
9849 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9850 {
9851 if (GET_CODE (op1) == CONST_INT
9852 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9853 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9854 unsignedp = 1;
9855 }
9856 #endif
9857
9858 if (! if_true_label)
9859 {
9860 dummy_true_label = 1;
9861 if_true_label = gen_label_rtx ();
9862 }
9863
9864 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9865 if_true_label);
9866
9867 if (if_false_label)
9868 emit_jump (if_false_label);
9869 if (dummy_true_label)
9870 emit_label (if_true_label);
9871 }
9872
9873 /* Generate code for a comparison expression EXP (including code to compute
9874 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9875 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9876 generated code will drop through.
9877 SIGNED_CODE should be the rtx operation for this comparison for
9878 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9879
9880 We force a stack adjustment unless there are currently
9881 things pushed on the stack that aren't yet used. */
9882
9883 static void
9884 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9885 if_true_label)
9886 register tree exp;
9887 enum rtx_code signed_code, unsigned_code;
9888 rtx if_false_label, if_true_label;
9889 {
9890 int align0, align1;
9891 register rtx op0, op1;
9892 register tree type;
9893 register enum machine_mode mode;
9894 int unsignedp;
9895 enum rtx_code code;
9896
9897 /* Don't crash if the comparison was erroneous. */
9898 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
9899 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9900 return;
9901
9902 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
9903 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9904 mode = TYPE_MODE (type);
9905 unsignedp = TREE_UNSIGNED (type);
9906 code = unsignedp ? unsigned_code : signed_code;
9907
9908 #ifdef HAVE_canonicalize_funcptr_for_compare
9909 /* If function pointers need to be "canonicalized" before they can
9910 be reliably compared, then canonicalize them. */
9911 if (HAVE_canonicalize_funcptr_for_compare
9912 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9913 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9914 == FUNCTION_TYPE))
9915 {
9916 rtx new_op0 = gen_reg_rtx (mode);
9917
9918 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9919 op0 = new_op0;
9920 }
9921
9922 if (HAVE_canonicalize_funcptr_for_compare
9923 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9924 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9925 == FUNCTION_TYPE))
9926 {
9927 rtx new_op1 = gen_reg_rtx (mode);
9928
9929 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9930 op1 = new_op1;
9931 }
9932 #endif
9933
9934 /* Do any postincrements in the expression that was tested. */
9935 emit_queue ();
9936
9937 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9938 ((mode == BLKmode)
9939 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9940 MIN (align0, align1) / BITS_PER_UNIT,
9941 if_false_label, if_true_label);
9942 }
9943 \f
9944 /* Generate code to calculate EXP using a store-flag instruction
9945 and return an rtx for the result. EXP is either a comparison
9946 or a TRUTH_NOT_EXPR whose operand is a comparison.
9947
9948 If TARGET is nonzero, store the result there if convenient.
9949
9950 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9951 cheap.
9952
9953 Return zero if there is no suitable set-flag instruction
9954 available on this machine.
9955
9956 Once expand_expr has been called on the arguments of the comparison,
9957 we are committed to doing the store flag, since it is not safe to
9958 re-evaluate the expression. We emit the store-flag insn by calling
9959 emit_store_flag, but only expand the arguments if we have a reason
9960 to believe that emit_store_flag will be successful. If we think that
9961 it will, but it isn't, we have to simulate the store-flag with a
9962 set/jump/set sequence. */
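/* For example, for `r = (a < b);' on a machine with a store-flag (scc)
   pattern we emit a compare followed by a single insn that deposits 0 or 1
   into the target register.  If emit_store_flag nevertheless fails, the
   fallback at the end of this function loads 1 into the target,
   conditionally jumps over a load of 0, and so produces the same value at
   the cost of a branch.  */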
9963
9964 static rtx
9965 do_store_flag (exp, target, mode, only_cheap)
9966 tree exp;
9967 rtx target;
9968 enum machine_mode mode;
9969 int only_cheap;
9970 {
9971 enum rtx_code code;
9972 tree arg0, arg1, type;
9973 tree tem;
9974 enum machine_mode operand_mode;
9975 int invert = 0;
9976 int unsignedp;
9977 rtx op0, op1;
9978 enum insn_code icode;
9979 rtx subtarget = target;
9980 rtx result, label;
9981
9982 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9983 result at the end. We can't simply invert the test since it would
9984 have already been inverted if it were valid. This case occurs for
9985 some floating-point comparisons. */
9986
9987 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9988 invert = 1, exp = TREE_OPERAND (exp, 0);
9989
9990 arg0 = TREE_OPERAND (exp, 0);
9991 arg1 = TREE_OPERAND (exp, 1);
9992 type = TREE_TYPE (arg0);
9993 operand_mode = TYPE_MODE (type);
9994 unsignedp = TREE_UNSIGNED (type);
9995
9996 /* We won't bother with BLKmode store-flag operations because it would mean
9997 passing a lot of information to emit_store_flag. */
9998 if (operand_mode == BLKmode)
9999 return 0;
10000
10001 /* We won't bother with store-flag operations involving function pointers
10002 when function pointers must be canonicalized before comparisons. */
10003 #ifdef HAVE_canonicalize_funcptr_for_compare
10004 if (HAVE_canonicalize_funcptr_for_compare
10005 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10006 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10007 == FUNCTION_TYPE))
10008 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10009 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10010 == FUNCTION_TYPE))))
10011 return 0;
10012 #endif
10013
10014 STRIP_NOPS (arg0);
10015 STRIP_NOPS (arg1);
10016
10017 /* Get the rtx comparison code to use. We know that EXP is a comparison
10018 operation of some type. Some comparisons against 1 and -1 can be
10019 converted to comparisons with zero. Do so here so that the tests
10020 below will be aware that we have a comparison with zero. These
10021 tests will not catch constants in the first operand, but constants
10022 are rarely passed as the first operand. */
10023
10024 switch (TREE_CODE (exp))
10025 {
10026 case EQ_EXPR:
10027 code = EQ;
10028 break;
10029 case NE_EXPR:
10030 code = NE;
10031 break;
10032 case LT_EXPR:
10033 if (integer_onep (arg1))
10034 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10035 else
10036 code = unsignedp ? LTU : LT;
10037 break;
10038 case LE_EXPR:
10039 if (! unsignedp && integer_all_onesp (arg1))
10040 arg1 = integer_zero_node, code = LT;
10041 else
10042 code = unsignedp ? LEU : LE;
10043 break;
10044 case GT_EXPR:
10045 if (! unsignedp && integer_all_onesp (arg1))
10046 arg1 = integer_zero_node, code = GE;
10047 else
10048 code = unsignedp ? GTU : GT;
10049 break;
10050 case GE_EXPR:
10051 if (integer_onep (arg1))
10052 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10053 else
10054 code = unsignedp ? GEU : GE;
10055 break;
10056 default:
10057 abort ();
10058 }
10059
10060 /* Put a constant second. */
10061 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10062 {
10063 tem = arg0; arg0 = arg1; arg1 = tem;
10064 code = swap_condition (code);
10065 }
10066
10067 /* If this is an equality or inequality test of a single bit, we can
10068 do this by shifting the bit being tested to the low-order bit and
10069 masking the result with the constant 1. If the condition was EQ,
10070 we xor it with 1. This does not require an scc insn and is faster
10071 than an scc insn even if we have it. */
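/* For example, `(x & 8) != 0' becomes `(x >> 3) & 1': the tested bit is
   shifted down to bit 0 and masked.  For the EQ form, `(x & 8) == 0', the
   result is additionally XORed with 1.  When the tested bit is the sign
   bit, the final AND can be omitted by doing the operations unsigned.  */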
10072
10073 if ((code == NE || code == EQ)
10074 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10075 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10076 {
10077 tree inner = TREE_OPERAND (arg0, 0);
10078 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10079 int ops_unsignedp;
10080
10081 /* If INNER is a right shift by a constant and the shift count plus
10082 BITNUM stays within the precision of the type, adjust BITNUM and INNER. */
10083
10084 if (TREE_CODE (inner) == RSHIFT_EXPR
10085 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10086 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10087 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10088 < TYPE_PRECISION (type)))
10089 {
10090 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10091 inner = TREE_OPERAND (inner, 0);
10092 }
10093
10094 /* If we are going to be able to omit the AND below, we must do our
10095 operations as unsigned. If we must use the AND, we have a choice.
10096 Normally unsigned is faster, but for some machines signed is. */
10097 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10098 #ifdef LOAD_EXTEND_OP
10099 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10100 #else
10101 : 1
10102 #endif
10103 );
10104
10105 if (subtarget == 0 || GET_CODE (subtarget) != REG
10106 || GET_MODE (subtarget) != operand_mode
10107 || ! safe_from_p (subtarget, inner, 1))
10108 subtarget = 0;
10109
10110 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10111
10112 if (bitnum != 0)
10113 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10114 size_int (bitnum), subtarget, ops_unsignedp);
10115
10116 if (GET_MODE (op0) != mode)
10117 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10118
10119 if ((code == EQ && ! invert) || (code == NE && invert))
10120 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10121 ops_unsignedp, OPTAB_LIB_WIDEN);
10122
10123 /* Put the AND last so it can combine with more things. */
10124 if (bitnum != TYPE_PRECISION (type) - 1)
10125 op0 = expand_and (op0, const1_rtx, subtarget);
10126
10127 return op0;
10128 }
10129
10130 /* Now see if we are likely to be able to do this. Return if not. */
10131 if (! can_compare_p (operand_mode, ccp_store_flag))
10132 return 0;
10133 icode = setcc_gen_code[(int) code];
10134 if (icode == CODE_FOR_nothing
10135 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10136 {
10137 /* We can only do this if it is one of the special cases that
10138 can be handled without an scc insn. */
10139 if ((code == LT && integer_zerop (arg1))
10140 || (! only_cheap && code == GE && integer_zerop (arg1)))
10141 ;
10142 else if (BRANCH_COST >= 0
10143 && ! only_cheap && (code == NE || code == EQ)
10144 && TREE_CODE (type) != REAL_TYPE
10145 && ((abs_optab->handlers[(int) operand_mode].insn_code
10146 != CODE_FOR_nothing)
10147 || (ffs_optab->handlers[(int) operand_mode].insn_code
10148 != CODE_FOR_nothing)))
10149 ;
10150 else
10151 return 0;
10152 }
10153
10154 preexpand_calls (exp);
10155 if (subtarget == 0 || GET_CODE (subtarget) != REG
10156 || GET_MODE (subtarget) != operand_mode
10157 || ! safe_from_p (subtarget, arg1, 1))
10158 subtarget = 0;
10159
10160 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10161 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10162
10163 if (target == 0)
10164 target = gen_reg_rtx (mode);
10165
10166 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10167 because, if the emit_store_flag does anything it will succeed and
10168 OP0 and OP1 will not be used subsequently. */
10169
10170 result = emit_store_flag (target, code,
10171 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10172 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10173 operand_mode, unsignedp, 1);
10174
10175 if (result)
10176 {
10177 if (invert)
10178 result = expand_binop (mode, xor_optab, result, const1_rtx,
10179 result, 0, OPTAB_LIB_WIDEN);
10180 return result;
10181 }
10182
10183 /* If this failed, we have to do this with set/compare/jump/set code. */
10184 if (GET_CODE (target) != REG
10185 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10186 target = gen_reg_rtx (GET_MODE (target));
10187
10188 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10189 result = compare_from_rtx (op0, op1, code, unsignedp,
10190 operand_mode, NULL_RTX, 0);
10191 if (GET_CODE (result) == CONST_INT)
10192 return (((result == const0_rtx && ! invert)
10193 || (result != const0_rtx && invert))
10194 ? const0_rtx : const1_rtx);
10195
10196 label = gen_label_rtx ();
10197 if (bcc_gen_fctn[(int) code] == 0)
10198 abort ();
10199
10200 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10201 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10202 emit_label (label);
10203
10204 return target;
10205 }
10206 \f
10207 /* Generate a tablejump instruction (used for switch statements). */
10208
10209 #ifdef HAVE_tablejump
10210
10211 /* INDEX is the value being switched on, with the lowest value
10212 in the table already subtracted.
10213 MODE is its expected mode (needed if INDEX is constant).
10214 RANGE is the length of the jump table.
10215 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10216
10217 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10218 index value is out of range. */
10219
10220 void
10221 do_tablejump (index, mode, range, table_label, default_label)
10222 rtx index, range, table_label, default_label;
10223 enum machine_mode mode;
10224 {
10225 register rtx temp, vector;
10226
10227 /* Do an unsigned comparison (in the proper mode) between the index
10228 expression and the value which represents the length of the range.
10229 Since we just finished subtracting the lower bound of the range
10230 from the index expression, this comparison allows us to simultaneously
10231 check that the original index expression value is both greater than
10232 or equal to the minimum value of the range and less than or equal to
10233 the maximum value of the range. */
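/* For example, with case values 10 through 25 the caller has already
   subtracted 10 from INDEX, so in effect the single unsigned test
   `(unsigned) (index - 10) > range' sends an original index below 10
   (the subtraction wraps around to a huge unsigned value) as well as one
   above 25 to DEFAULT_LABEL.  */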
10234
10235 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10236 0, default_label);
10237
10238 /* If index is in range, it must fit in Pmode.
10239 Convert to Pmode so we can index with it. */
10240 if (mode != Pmode)
10241 index = convert_to_mode (Pmode, index, 1);
10242
10243 /* Don't let a MEM slip through, because then INDEX that comes
10244 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10245 and break_out_memory_refs will go to work on it and mess it up. */
10246 #ifdef PIC_CASE_VECTOR_ADDRESS
10247 if (flag_pic && GET_CODE (index) != REG)
10248 index = copy_to_mode_reg (Pmode, index);
10249 #endif
10250
10251 /* If flag_force_addr were to affect this address
10252 it could interfere with the tricky assumptions made
10253 about addresses that contain label-refs,
10254 which may be valid only very near the tablejump itself. */
10255 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10256 GET_MODE_SIZE, because this indicates how large insns are. The other
10257 uses should all be Pmode, because they are addresses. This code
10258 could fail if addresses and insns are not the same size. */
10259 index = gen_rtx_PLUS (Pmode,
10260 gen_rtx_MULT (Pmode, index,
10261 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10262 gen_rtx_LABEL_REF (Pmode, table_label));
10263 #ifdef PIC_CASE_VECTOR_ADDRESS
10264 if (flag_pic)
10265 index = PIC_CASE_VECTOR_ADDRESS (index);
10266 else
10267 #endif
10268 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10269 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10270 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10271 RTX_UNCHANGING_P (vector) = 1;
10272 convert_move (temp, vector, 0);
10273
10274 emit_jump_insn (gen_tablejump (temp, table_label));
10275
10276 /* If we are generating PIC code or if the table is PC-relative, the
10277 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10278 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10279 emit_barrier ();
10280 }
10281
10282 #endif /* HAVE_tablejump */