1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "defaults.h"
42 #include "toplev.h"
43 #include "ggc.h"
44 #include "tm_p.h"
45
46 #define CEIL(x,y) (((x) + (y) - 1) / (y))
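/* For example, CEIL (11, 4) is 3: eleven bytes occupy three four-byte
   units, the last one only partially. */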
47
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
50
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
53
54 #ifdef PUSH_ROUNDING
55
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first */
58 #endif
59
60 #endif
61
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
65 #else
66 #define STACK_PUSH_CODE PRE_INC
67 #endif
68 #endif
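/* Illustration: with STACK_PUSH_CODE == PRE_DEC a push of X is represented
   as (set (mem:M (pre_dec (reg sp))) X), i.e. the stack pointer is
   decremented before the store; with PRE_INC it is incremented instead. */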
69
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
73 #endif
74
75 /* If this is nonzero, we do not bother generating VOLATILE
76 around volatile memory references, and we are willing to
77 output indirect addresses. If cse is to follow, we reject
78 indirect addresses so a useful potential cse is generated;
79 if it is used only once, instruction combination will produce
80 the same indirect address eventually. */
81 int cse_not_expected;
82
83 /* Nonzero to generate code for all the subroutines within an
84 expression before generating the upper levels of the expression.
85 Nowadays this is never zero. */
86 int do_preexpand_calls = 1;
87
88 /* Don't check memory usage, since code is being emitted to check memory
89 usage. Used when current_function_check_memory_usage is true, to avoid
90 infinite recursion. */
91 static int in_check_memory_usage;
92
93 /* This structure is used by move_by_pieces to describe the move to
94 be performed. */
95 struct move_by_pieces
96 {
97 rtx to;
98 rtx to_addr;
99 int autinc_to;
100 int explicit_inc_to;
101 int to_struct;
102 int to_readonly;
103 rtx from;
104 rtx from_addr;
105 int autinc_from;
106 int explicit_inc_from;
107 int from_struct;
108 int from_readonly;
109 int len;
110 int offset;
111 int reverse;
112 };
113
114 /* This structure is used by clear_by_pieces to describe the clear to
115 be performed. */
116
117 struct clear_by_pieces
118 {
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 int to_struct;
124 int len;
125 int offset;
126 int reverse;
127 };
128
129 extern struct obstack permanent_obstack;
130
131 static rtx get_push_address PROTO ((int));
132
133 static rtx enqueue_insn PROTO((rtx, rtx));
134 static int move_by_pieces_ninsns PROTO((unsigned int, int));
135 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
136 struct move_by_pieces *));
137 static void clear_by_pieces PROTO((rtx, int, int));
138 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...),
139 enum machine_mode,
140 struct clear_by_pieces *));
141 static int is_zeros_p PROTO((tree));
142 static int mostly_zeros_p PROTO((tree));
143 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
144 tree, tree, int, int));
145 static void store_constructor PROTO((tree, rtx, int, int));
146 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
147 enum machine_mode, int, int,
148 int, int));
149 static enum memory_use_mode
150 get_memory_usage_from_modifier PROTO((enum expand_modifier));
151 static tree save_noncopied_parts PROTO((tree, tree));
152 static tree init_noncopied_parts PROTO((tree, tree));
153 static int safe_from_p PROTO((rtx, tree, int));
154 static int fixed_type_p PROTO((tree));
155 static rtx var_rtx PROTO((tree));
156 static rtx expand_increment PROTO((tree, int, int));
157 static void preexpand_calls PROTO((tree));
158 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
159 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
160 static void do_compare_and_jump PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
161 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
162
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
166
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
169
170 /* If a memory-to-memory move would take MOVE_RATIO or more simple
171 move-instruction sequences, we will do a movstr or libcall instead. */
172
173 #ifndef MOVE_RATIO
174 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
175 #define MOVE_RATIO 2
176 #else
177 /* If we are optimizing for space (-Os), cut down the default move ratio */
178 #define MOVE_RATIO (optimize_size ? 3 : 15)
179 #endif
180 #endif
181
182 /* This macro is used to determine whether move_by_pieces should be called
183 to perform a structure copy. */
184 #ifndef MOVE_BY_PIECES_P
185 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
186 (SIZE, ALIGN) < MOVE_RATIO)
187 #endif
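/* Rough illustration (assuming word-sized moves are the widest available
   and MOVE_RATIO is 15): a word-aligned 32-byte copy needs 8 move insns and
   is expanded inline by move_by_pieces, while a 128-byte copy (32 insns)
   falls back to a movstr pattern or a library call. */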
188
189 /* This array records the insn_code of insns to perform block moves. */
190 enum insn_code movstr_optab[NUM_MACHINE_MODES];
191
192 /* This array records the insn_code of insns to perform block clears. */
193 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
194
195 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
196
197 #ifndef SLOW_UNALIGNED_ACCESS
198 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
199 #endif
200 \f
201 /* This is run once per compilation to set up which modes can be used
202 directly in memory and to initialize the block move optab. */
203
204 void
205 init_expr_once ()
206 {
207 rtx insn, pat;
208 enum machine_mode mode;
209 int num_clobbers;
210 rtx mem, mem1;
211 char *free_point;
212
213 start_sequence ();
214
215 /* Since we are on the permanent obstack, we must be sure we save this
216 spot AFTER we call start_sequence, since it will reuse the rtl it
217 makes. */
218 free_point = (char *) oballoc (0);
219
220 /* Try indexing by frame ptr and try by stack ptr.
221 It is known that on the Convex the stack ptr isn't a valid index.
222 With luck, one or the other is valid on any machine. */
223 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
224 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
225
226 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
227 pat = PATTERN (insn);
228
229 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
230 mode = (enum machine_mode) ((int) mode + 1))
231 {
232 int regno;
233 rtx reg;
234
235 direct_load[(int) mode] = direct_store[(int) mode] = 0;
236 PUT_MODE (mem, mode);
237 PUT_MODE (mem1, mode);
238
239 /* See if there is some register that can be used in this mode and
240 directly loaded or stored from memory. */
241
242 if (mode != VOIDmode && mode != BLKmode)
243 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
244 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
245 regno++)
246 {
247 if (! HARD_REGNO_MODE_OK (regno, mode))
248 continue;
249
250 reg = gen_rtx_REG (mode, regno);
251
252 SET_SRC (pat) = mem;
253 SET_DEST (pat) = reg;
254 if (recog (pat, insn, &num_clobbers) >= 0)
255 direct_load[(int) mode] = 1;
256
257 SET_SRC (pat) = mem1;
258 SET_DEST (pat) = reg;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_load[(int) mode] = 1;
261
262 SET_SRC (pat) = reg;
263 SET_DEST (pat) = mem;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_store[(int) mode] = 1;
266
267 SET_SRC (pat) = reg;
268 SET_DEST (pat) = mem1;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_store[(int) mode] = 1;
271 }
272 }
273
274 end_sequence ();
275 obfree (free_point);
276 }
277
278 /* This is run at the start of compiling a function. */
279
280 void
281 init_expr ()
282 {
283 current_function->expr
284 = (struct expr_status *) xmalloc (sizeof (struct expr_status));
285
286 pending_chain = 0;
287 pending_stack_adjust = 0;
288 inhibit_defer_pop = 0;
289 saveregs_value = 0;
290 apply_args_value = 0;
291 forced_labels = 0;
292 }
293
294 void
295 mark_expr_status (p)
296 struct expr_status *p;
297 {
298 if (p == NULL)
299 return;
300
301 ggc_mark_rtx (p->x_saveregs_value);
302 ggc_mark_rtx (p->x_apply_args_value);
303 ggc_mark_rtx (p->x_forced_labels);
304 }
305
306 void
307 free_expr_status (f)
308 struct function *f;
309 {
310 free (f->expr);
311 f->expr = NULL;
312 }
313
314 /* Small sanity check that the queue is empty at the end of a function. */
315 void
316 finish_expr_for_function ()
317 {
318 if (pending_chain)
319 abort ();
320 }
321 \f
322 /* Manage the queue of increment instructions to be output
323 for POSTINCREMENT_EXPR expressions, etc. */
324
325 /* Queue up to increment (or change) VAR later. BODY says how:
326 BODY should be the same thing you would pass to emit_insn
327 to increment right away. It will go to emit_insn later on.
328
329 The value is a QUEUED expression to be used in place of VAR
330 where you want to guarantee the pre-incrementation value of VAR. */
331
332 static rtx
333 enqueue_insn (var, body)
334 rtx var, body;
335 {
336 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
337 body, pending_chain);
338 return pending_chain;
339 }
340
341 /* Use protect_from_queue to convert a QUEUED expression
342 into something that you can put immediately into an instruction.
343 If the queued incrementation has not happened yet,
344 protect_from_queue returns the variable itself.
345 If the incrementation has happened, protect_from_queue returns a temp
346 that contains a copy of the old value of the variable.
347
348 Any time an rtx which might possibly be a QUEUED is to be put
349 into an instruction, it must be passed through protect_from_queue first.
350 QUEUED expressions are not meaningful in instructions.
351
352 Do not pass a value through protect_from_queue and then hold
353 on to it for a while before putting it in an instruction!
354 If the queue is flushed in between, incorrect code will result. */
355
356 rtx
357 protect_from_queue (x, modify)
358 register rtx x;
359 int modify;
360 {
361 register RTX_CODE code = GET_CODE (x);
362
363 #if 0 /* A QUEUED can hang around after the queue is forced out. */
364 /* Shortcut for most common case. */
365 if (pending_chain == 0)
366 return x;
367 #endif
368
369 if (code != QUEUED)
370 {
371 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
372 use of autoincrement. Make a copy of the contents of the memory
373 location rather than a copy of the address, but not if the value is
374 of mode BLKmode. Don't modify X in place since it might be
375 shared. */
376 if (code == MEM && GET_MODE (x) != BLKmode
377 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
378 {
379 register rtx y = XEXP (x, 0);
380 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
381
382 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
383 MEM_COPY_ATTRIBUTES (new, x);
384 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
385
386 if (QUEUED_INSN (y))
387 {
388 register rtx temp = gen_reg_rtx (GET_MODE (new));
389 emit_insn_before (gen_move_insn (temp, new),
390 QUEUED_INSN (y));
391 return temp;
392 }
393 return new;
394 }
395 /* Otherwise, recursively protect the subexpressions of all
396 the kinds of rtx's that can contain a QUEUED. */
397 if (code == MEM)
398 {
399 rtx tem = protect_from_queue (XEXP (x, 0), 0);
400 if (tem != XEXP (x, 0))
401 {
402 x = copy_rtx (x);
403 XEXP (x, 0) = tem;
404 }
405 }
406 else if (code == PLUS || code == MULT)
407 {
408 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
409 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
410 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
411 {
412 x = copy_rtx (x);
413 XEXP (x, 0) = new0;
414 XEXP (x, 1) = new1;
415 }
416 }
417 return x;
418 }
419 /* If the increment has not happened, use the variable itself. */
420 if (QUEUED_INSN (x) == 0)
421 return QUEUED_VAR (x);
422 /* If the increment has happened and a pre-increment copy exists,
423 use that copy. */
424 if (QUEUED_COPY (x) != 0)
425 return QUEUED_COPY (x);
426 /* The increment has happened but we haven't set up a pre-increment copy.
427 Set one up now, and use it. */
428 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
429 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
430 QUEUED_INSN (x));
431 return QUEUED_COPY (x);
432 }
433
434 /* Return nonzero if X contains a QUEUED expression:
435 if it contains anything that will be altered by a queued increment.
436 We handle only combinations of MEM, PLUS, MINUS and MULT operators
437 since memory addresses generally contain only those. */
438
439 int
440 queued_subexp_p (x)
441 rtx x;
442 {
443 register enum rtx_code code = GET_CODE (x);
444 switch (code)
445 {
446 case QUEUED:
447 return 1;
448 case MEM:
449 return queued_subexp_p (XEXP (x, 0));
450 case MULT:
451 case PLUS:
452 case MINUS:
453 return (queued_subexp_p (XEXP (x, 0))
454 || queued_subexp_p (XEXP (x, 1)));
455 default:
456 return 0;
457 }
458 }
459
460 /* Perform all the pending incrementations. */
461
462 void
463 emit_queue ()
464 {
465 register rtx p;
466 while ((p = pending_chain))
467 {
468 rtx body = QUEUED_BODY (p);
469
470 if (GET_CODE (body) == SEQUENCE)
471 {
472 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
473 emit_insn (QUEUED_BODY (p));
474 }
475 else
476 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
477 pending_chain = QUEUED_NEXT (p);
478 }
479 }
480 \f
481 /* Copy data from FROM to TO, where the machine modes are not the same.
482 Both modes may be integer, or both may be floating.
483 UNSIGNEDP should be nonzero if FROM is an unsigned type.
484 This causes zero-extension instead of sign-extension. */
485
486 void
487 convert_move (to, from, unsignedp)
488 register rtx to, from;
489 int unsignedp;
490 {
491 enum machine_mode to_mode = GET_MODE (to);
492 enum machine_mode from_mode = GET_MODE (from);
493 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
494 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
495 enum insn_code code;
496 rtx libcall;
497
498 /* rtx code for making an equivalent value. */
499 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
500
501 to = protect_from_queue (to, 1);
502 from = protect_from_queue (from, 0);
503
504 if (to_real != from_real)
505 abort ();
506
507 /* If FROM is a SUBREG that indicates that we have already done at least
508 the required extension, strip it. We don't handle such SUBREGs as
509 TO here. */
510
511 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
512 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
513 >= GET_MODE_SIZE (to_mode))
514 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
515 from = gen_lowpart (to_mode, from), from_mode = to_mode;
516
517 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
518 abort ();
519
520 if (to_mode == from_mode
521 || (from_mode == VOIDmode && CONSTANT_P (from)))
522 {
523 emit_move_insn (to, from);
524 return;
525 }
526
527 if (to_real)
528 {
529 rtx value;
530
531 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
532 {
533 /* Try converting directly if the insn is supported. */
534 if ((code = can_extend_p (to_mode, from_mode, 0))
535 != CODE_FOR_nothing)
536 {
537 emit_unop_insn (code, to, from, UNKNOWN);
538 return;
539 }
540 }
541
542 #ifdef HAVE_trunchfqf2
543 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
544 {
545 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
546 return;
547 }
548 #endif
549 #ifdef HAVE_trunctqfqf2
550 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
551 {
552 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
553 return;
554 }
555 #endif
556 #ifdef HAVE_truncsfqf2
557 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
558 {
559 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
560 return;
561 }
562 #endif
563 #ifdef HAVE_truncdfqf2
564 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
565 {
566 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
567 return;
568 }
569 #endif
570 #ifdef HAVE_truncxfqf2
571 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
572 {
573 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
574 return;
575 }
576 #endif
577 #ifdef HAVE_trunctfqf2
578 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
579 {
580 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
581 return;
582 }
583 #endif
584
585 #ifdef HAVE_trunctqfhf2
586 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
587 {
588 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
589 return;
590 }
591 #endif
592 #ifdef HAVE_truncsfhf2
593 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
594 {
595 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
596 return;
597 }
598 #endif
599 #ifdef HAVE_truncdfhf2
600 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
601 {
602 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
603 return;
604 }
605 #endif
606 #ifdef HAVE_truncxfhf2
607 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
608 {
609 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
610 return;
611 }
612 #endif
613 #ifdef HAVE_trunctfhf2
614 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
615 {
616 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
617 return;
618 }
619 #endif
620
621 #ifdef HAVE_truncsftqf2
622 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
623 {
624 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
625 return;
626 }
627 #endif
628 #ifdef HAVE_truncdftqf2
629 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
630 {
631 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
632 return;
633 }
634 #endif
635 #ifdef HAVE_truncxftqf2
636 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
637 {
638 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
639 return;
640 }
641 #endif
642 #ifdef HAVE_trunctftqf2
643 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
644 {
645 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
646 return;
647 }
648 #endif
649
650 #ifdef HAVE_truncdfsf2
651 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
652 {
653 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
654 return;
655 }
656 #endif
657 #ifdef HAVE_truncxfsf2
658 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
659 {
660 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
661 return;
662 }
663 #endif
664 #ifdef HAVE_trunctfsf2
665 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
666 {
667 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
668 return;
669 }
670 #endif
671 #ifdef HAVE_truncxfdf2
672 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
673 {
674 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
675 return;
676 }
677 #endif
678 #ifdef HAVE_trunctfdf2
679 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
680 {
681 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
682 return;
683 }
684 #endif
685
686 libcall = (rtx) 0;
687 switch (from_mode)
688 {
689 case SFmode:
690 switch (to_mode)
691 {
692 case DFmode:
693 libcall = extendsfdf2_libfunc;
694 break;
695
696 case XFmode:
697 libcall = extendsfxf2_libfunc;
698 break;
699
700 case TFmode:
701 libcall = extendsftf2_libfunc;
702 break;
703
704 default:
705 break;
706 }
707 break;
708
709 case DFmode:
710 switch (to_mode)
711 {
712 case SFmode:
713 libcall = truncdfsf2_libfunc;
714 break;
715
716 case XFmode:
717 libcall = extenddfxf2_libfunc;
718 break;
719
720 case TFmode:
721 libcall = extenddftf2_libfunc;
722 break;
723
724 default:
725 break;
726 }
727 break;
728
729 case XFmode:
730 switch (to_mode)
731 {
732 case SFmode:
733 libcall = truncxfsf2_libfunc;
734 break;
735
736 case DFmode:
737 libcall = truncxfdf2_libfunc;
738 break;
739
740 default:
741 break;
742 }
743 break;
744
745 case TFmode:
746 switch (to_mode)
747 {
748 case SFmode:
749 libcall = trunctfsf2_libfunc;
750 break;
751
752 case DFmode:
753 libcall = trunctfdf2_libfunc;
754 break;
755
756 default:
757 break;
758 }
759 break;
760
761 default:
762 break;
763 }
764
765 if (libcall == (rtx) 0)
766 /* This conversion is not implemented yet. */
767 abort ();
768
769 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
770 1, from, from_mode);
771 emit_move_insn (to, value);
772 return;
773 }
774
775 /* Now both modes are integers. */
776
777 /* Handle expanding beyond a word. */
778 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
779 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
780 {
781 rtx insns;
782 rtx lowpart;
783 rtx fill_value;
784 rtx lowfrom;
785 int i;
786 enum machine_mode lowpart_mode;
787 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
788
789 /* Try converting directly if the insn is supported. */
790 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
791 != CODE_FOR_nothing)
792 {
793 /* If FROM is a SUBREG, put it into a register. Do this
794 so that we always generate the same set of insns for
795 better cse'ing; if an intermediate assignment occurred,
796 we won't be doing the operation directly on the SUBREG. */
797 if (optimize > 0 && GET_CODE (from) == SUBREG)
798 from = force_reg (from_mode, from);
799 emit_unop_insn (code, to, from, equiv_code);
800 return;
801 }
802 /* Next, try converting via full word. */
803 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
804 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
805 != CODE_FOR_nothing))
806 {
807 if (GET_CODE (to) == REG)
808 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
809 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
810 emit_unop_insn (code, to,
811 gen_lowpart (word_mode, to), equiv_code);
812 return;
813 }
814
815 /* No special multiword conversion insn; do it by hand. */
816 start_sequence ();
817
818 /* Since we will turn this into a no conflict block, we must ensure
819 that the source does not overlap the target. */
820
821 if (reg_overlap_mentioned_p (to, from))
822 from = force_reg (from_mode, from);
823
824 /* Get a copy of FROM widened to a word, if necessary. */
825 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
826 lowpart_mode = word_mode;
827 else
828 lowpart_mode = from_mode;
829
830 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
831
832 lowpart = gen_lowpart (lowpart_mode, to);
833 emit_move_insn (lowpart, lowfrom);
834
835 /* Compute the value to put in each remaining word. */
836 if (unsignedp)
837 fill_value = const0_rtx;
838 else
839 {
840 #ifdef HAVE_slt
841 if (HAVE_slt
842 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
843 && STORE_FLAG_VALUE == -1)
844 {
845 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
846 lowpart_mode, 0, 0);
847 fill_value = gen_reg_rtx (word_mode);
848 emit_insn (gen_slt (fill_value));
849 }
850 else
851 #endif
852 {
853 fill_value
854 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
855 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
856 NULL_RTX, 0);
857 fill_value = convert_to_mode (word_mode, fill_value, 1);
858 }
859 }
860
861 /* Fill the remaining words. */
862 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
863 {
864 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
865 rtx subword = operand_subword (to, index, 1, to_mode);
866
867 if (subword == 0)
868 abort ();
869
870 if (fill_value != subword)
871 emit_move_insn (subword, fill_value);
872 }
873
874 insns = get_insns ();
875 end_sequence ();
876
877 emit_no_conflict_block (insns, to, from, NULL_RTX,
878 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
879 return;
880 }
881
882 /* Truncating multi-word to a word or less. */
883 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
884 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
885 {
886 if (!((GET_CODE (from) == MEM
887 && ! MEM_VOLATILE_P (from)
888 && direct_load[(int) to_mode]
889 && ! mode_dependent_address_p (XEXP (from, 0)))
890 || GET_CODE (from) == REG
891 || GET_CODE (from) == SUBREG))
892 from = force_reg (from_mode, from);
893 convert_move (to, gen_lowpart (word_mode, from), 0);
894 return;
895 }
896
897 /* Handle pointer conversion */ /* SPEE 900220 */
898 if (to_mode == PQImode)
899 {
900 if (from_mode != QImode)
901 from = convert_to_mode (QImode, from, unsignedp);
902
903 #ifdef HAVE_truncqipqi2
904 if (HAVE_truncqipqi2)
905 {
906 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
907 return;
908 }
909 #endif /* HAVE_truncqipqi2 */
910 abort ();
911 }
912
913 if (from_mode == PQImode)
914 {
915 if (to_mode != QImode)
916 {
917 from = convert_to_mode (QImode, from, unsignedp);
918 from_mode = QImode;
919 }
920 else
921 {
922 #ifdef HAVE_extendpqiqi2
923 if (HAVE_extendpqiqi2)
924 {
925 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
926 return;
927 }
928 #endif /* HAVE_extendpqiqi2 */
929 abort ();
930 }
931 }
932
933 if (to_mode == PSImode)
934 {
935 if (from_mode != SImode)
936 from = convert_to_mode (SImode, from, unsignedp);
937
938 #ifdef HAVE_truncsipsi2
939 if (HAVE_truncsipsi2)
940 {
941 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
942 return;
943 }
944 #endif /* HAVE_truncsipsi2 */
945 abort ();
946 }
947
948 if (from_mode == PSImode)
949 {
950 if (to_mode != SImode)
951 {
952 from = convert_to_mode (SImode, from, unsignedp);
953 from_mode = SImode;
954 }
955 else
956 {
957 #ifdef HAVE_extendpsisi2
958 if (HAVE_extendpsisi2)
959 {
960 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
961 return;
962 }
963 #endif /* HAVE_extendpsisi2 */
964 abort ();
965 }
966 }
967
968 if (to_mode == PDImode)
969 {
970 if (from_mode != DImode)
971 from = convert_to_mode (DImode, from, unsignedp);
972
973 #ifdef HAVE_truncdipdi2
974 if (HAVE_truncdipdi2)
975 {
976 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
977 return;
978 }
979 #endif /* HAVE_truncdipdi2 */
980 abort ();
981 }
982
983 if (from_mode == PDImode)
984 {
985 if (to_mode != DImode)
986 {
987 from = convert_to_mode (DImode, from, unsignedp);
988 from_mode = DImode;
989 }
990 else
991 {
992 #ifdef HAVE_extendpdidi2
993 if (HAVE_extendpdidi2)
994 {
995 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
996 return;
997 }
998 #endif /* HAVE_extendpdidi2 */
999 abort ();
1000 }
1001 }
1002
1003 /* Now follow all the conversions between integers
1004 no more than a word long. */
1005
1006 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1007 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1008 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1009 GET_MODE_BITSIZE (from_mode)))
1010 {
1011 if (!((GET_CODE (from) == MEM
1012 && ! MEM_VOLATILE_P (from)
1013 && direct_load[(int) to_mode]
1014 && ! mode_dependent_address_p (XEXP (from, 0)))
1015 || GET_CODE (from) == REG
1016 || GET_CODE (from) == SUBREG))
1017 from = force_reg (from_mode, from);
1018 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1019 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1020 from = copy_to_reg (from);
1021 emit_move_insn (to, gen_lowpart (to_mode, from));
1022 return;
1023 }
1024
1025 /* Handle extension. */
1026 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1027 {
1028 /* Convert directly if that works. */
1029 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1030 != CODE_FOR_nothing)
1031 {
1032 emit_unop_insn (code, to, from, equiv_code);
1033 return;
1034 }
1035 else
1036 {
1037 enum machine_mode intermediate;
1038 rtx tmp;
1039 tree shift_amount;
1040
1041 /* Search for a mode to convert via. */
1042 for (intermediate = from_mode; intermediate != VOIDmode;
1043 intermediate = GET_MODE_WIDER_MODE (intermediate))
1044 if (((can_extend_p (to_mode, intermediate, unsignedp)
1045 != CODE_FOR_nothing)
1046 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1047 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1048 GET_MODE_BITSIZE (intermediate))))
1049 && (can_extend_p (intermediate, from_mode, unsignedp)
1050 != CODE_FOR_nothing))
1051 {
1052 convert_move (to, convert_to_mode (intermediate, from,
1053 unsignedp), unsignedp);
1054 return;
1055 }
1056
1057 /* No suitable intermediate mode.
1058 Generate what we need with shifts. */
1059 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1060 - GET_MODE_BITSIZE (from_mode), 0);
1061 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1062 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1063 to, unsignedp);
1064 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1065 to, unsignedp);
1066 if (tmp != to)
1067 emit_move_insn (to, tmp);
1068 return;
1069 }
1070 }
1071
1072 /* Support special truncate insns for certain modes. */
1073
1074 if (from_mode == DImode && to_mode == SImode)
1075 {
1076 #ifdef HAVE_truncdisi2
1077 if (HAVE_truncdisi2)
1078 {
1079 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1080 return;
1081 }
1082 #endif
1083 convert_move (to, force_reg (from_mode, from), unsignedp);
1084 return;
1085 }
1086
1087 if (from_mode == DImode && to_mode == HImode)
1088 {
1089 #ifdef HAVE_truncdihi2
1090 if (HAVE_truncdihi2)
1091 {
1092 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1093 return;
1094 }
1095 #endif
1096 convert_move (to, force_reg (from_mode, from), unsignedp);
1097 return;
1098 }
1099
1100 if (from_mode == DImode && to_mode == QImode)
1101 {
1102 #ifdef HAVE_truncdiqi2
1103 if (HAVE_truncdiqi2)
1104 {
1105 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1106 return;
1107 }
1108 #endif
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1110 return;
1111 }
1112
1113 if (from_mode == SImode && to_mode == HImode)
1114 {
1115 #ifdef HAVE_truncsihi2
1116 if (HAVE_truncsihi2)
1117 {
1118 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1119 return;
1120 }
1121 #endif
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 return;
1124 }
1125
1126 if (from_mode == SImode && to_mode == QImode)
1127 {
1128 #ifdef HAVE_truncsiqi2
1129 if (HAVE_truncsiqi2)
1130 {
1131 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1132 return;
1133 }
1134 #endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1137 }
1138
1139 if (from_mode == HImode && to_mode == QImode)
1140 {
1141 #ifdef HAVE_trunchiqi2
1142 if (HAVE_trunchiqi2)
1143 {
1144 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1145 return;
1146 }
1147 #endif
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 return;
1150 }
1151
1152 if (from_mode == TImode && to_mode == DImode)
1153 {
1154 #ifdef HAVE_trunctidi2
1155 if (HAVE_trunctidi2)
1156 {
1157 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1158 return;
1159 }
1160 #endif
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 return;
1163 }
1164
1165 if (from_mode == TImode && to_mode == SImode)
1166 {
1167 #ifdef HAVE_trunctisi2
1168 if (HAVE_trunctisi2)
1169 {
1170 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1171 return;
1172 }
1173 #endif
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 return;
1176 }
1177
1178 if (from_mode == TImode && to_mode == HImode)
1179 {
1180 #ifdef HAVE_trunctihi2
1181 if (HAVE_trunctihi2)
1182 {
1183 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1184 return;
1185 }
1186 #endif
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 return;
1189 }
1190
1191 if (from_mode == TImode && to_mode == QImode)
1192 {
1193 #ifdef HAVE_trunctiqi2
1194 if (HAVE_trunctiqi2)
1195 {
1196 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1197 return;
1198 }
1199 #endif
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 return;
1202 }
1203
1204 /* Handle truncation of volatile memrefs, and so on;
1205 the things that couldn't be truncated directly,
1206 and for which there was no special instruction. */
1207 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1208 {
1209 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1210 emit_move_insn (to, temp);
1211 return;
1212 }
1213
1214 /* Mode combination is not recognized. */
1215 abort ();
1216 }
1217
1218 /* Return an rtx for a value that would result
1219 from converting X to mode MODE.
1220 Both X and MODE may be floating, or both integer.
1221 UNSIGNEDP is nonzero if X is an unsigned value.
1222 This can be done by referring to a part of X in place
1223 or by copying to a new temporary with conversion.
1224
1225 This function *must not* call protect_from_queue
1226 except when putting X into an insn (in which case convert_move does it). */
1227
1228 rtx
1229 convert_to_mode (mode, x, unsignedp)
1230 enum machine_mode mode;
1231 rtx x;
1232 int unsignedp;
1233 {
1234 return convert_modes (mode, VOIDmode, x, unsignedp);
1235 }
1236
1237 /* Return an rtx for a value that would result
1238 from converting X from mode OLDMODE to mode MODE.
1239 Both modes may be floating, or both integer.
1240 UNSIGNEDP is nonzero if X is an unsigned value.
1241
1242 This can be done by referring to a part of X in place
1243 or by copying to a new temporary with conversion.
1244
1245 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1246
1247 This function *must not* call protect_from_queue
1248 except when putting X into an insn (in which case convert_move does it). */
1249
1250 rtx
1251 convert_modes (mode, oldmode, x, unsignedp)
1252 enum machine_mode mode, oldmode;
1253 rtx x;
1254 int unsignedp;
1255 {
1256 register rtx temp;
1257
1258 /* If FROM is a SUBREG that indicates that we have already done at least
1259 the required extension, strip it. */
1260
1261 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1262 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1263 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1264 x = gen_lowpart (mode, x);
1265
1266 if (GET_MODE (x) != VOIDmode)
1267 oldmode = GET_MODE (x);
1268
1269 if (mode == oldmode)
1270 return x;
1271
1272 /* There is one case that we must handle specially: If we are converting
1273 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1274 we are to interpret the constant as unsigned, gen_lowpart will do
1275 the wrong thing if the constant appears negative. What we want to do is
1276 make the high-order word of the constant zero, not all ones. */
1277
1278 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1279 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1280 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1281 {
1282 HOST_WIDE_INT val = INTVAL (x);
1283
1284 if (oldmode != VOIDmode
1285 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1286 {
1287 int width = GET_MODE_BITSIZE (oldmode);
1288
1289 /* We need to zero extend VAL. */
1290 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1291 }
1292
1293 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1294 }
1295
1296 /* We can do this with a gen_lowpart if both desired and current modes
1297 are integer, and this is either a constant integer, a register, or a
1298 non-volatile MEM. Except for the constant case where MODE is no
1299 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1300
1301 if ((GET_CODE (x) == CONST_INT
1302 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1303 || (GET_MODE_CLASS (mode) == MODE_INT
1304 && GET_MODE_CLASS (oldmode) == MODE_INT
1305 && (GET_CODE (x) == CONST_DOUBLE
1306 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1307 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1308 && direct_load[(int) mode])
1309 || (GET_CODE (x) == REG
1310 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1311 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1312 {
1313 /* ?? If we don't know OLDMODE, we have to assume here that
1314 X does not need sign- or zero-extension. This may not be
1315 the case, but it's the best we can do. */
1316 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1317 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1318 {
1319 HOST_WIDE_INT val = INTVAL (x);
1320 int width = GET_MODE_BITSIZE (oldmode);
1321
1322 /* We must sign or zero-extend in this case. Start by
1323 zero-extending, then sign extend if we need to. */
1324 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1325 if (! unsignedp
1326 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1327 val |= (HOST_WIDE_INT) (-1) << width;
1328
1329 return GEN_INT (val);
1330 }
1331
1332 return gen_lowpart (mode, x);
1333 }
1334
1335 temp = gen_reg_rtx (mode);
1336 convert_move (temp, x, unsignedp);
1337 return temp;
1338 }
1339 \f
1340
1341 /* This macro determines the largest unit size that
1342 move_by_pieces can use. */
1343
1344 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1345 move efficiently, as opposed to MOVE_MAX which is the maximum
1346 number of bytes we can move with a single instruction. */
1347
1348 #ifndef MOVE_MAX_PIECES
1349 #define MOVE_MAX_PIECES MOVE_MAX
1350 #endif
1351
1352 /* Generate several move instructions to copy LEN bytes
1353 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1354 The caller must pass FROM and TO
1355 through protect_from_queue before calling.
1356 ALIGN (in bytes) is maximum alignment we can assume. */
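/* Sketch of the strategy (assuming MOVE_MAX_PIECES == 4 and sufficient
   alignment): an 11-byte copy is emitted as two SImode moves, one HImode
   move and one QImode move, always using the widest mode that still fits
   in the remaining length. */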
1357
1358 void
1359 move_by_pieces (to, from, len, align)
1360 rtx to, from;
1361 int len, align;
1362 {
1363 struct move_by_pieces data;
1364 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1365 int max_size = MOVE_MAX_PIECES + 1;
1366 enum machine_mode mode = VOIDmode, tmode;
1367 enum insn_code icode;
1368
1369 data.offset = 0;
1370 data.to_addr = to_addr;
1371 data.from_addr = from_addr;
1372 data.to = to;
1373 data.from = from;
1374 data.autinc_to
1375 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1376 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1377 data.autinc_from
1378 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1379 || GET_CODE (from_addr) == POST_INC
1380 || GET_CODE (from_addr) == POST_DEC);
1381
1382 data.explicit_inc_from = 0;
1383 data.explicit_inc_to = 0;
1384 data.reverse
1385 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1386 if (data.reverse) data.offset = len;
1387 data.len = len;
1388
1389 data.to_struct = MEM_IN_STRUCT_P (to);
1390 data.from_struct = MEM_IN_STRUCT_P (from);
1391 data.to_readonly = RTX_UNCHANGING_P (to);
1392 data.from_readonly = RTX_UNCHANGING_P (from);
1393
1394 /* If copying requires more than two move insns,
1395 copy addresses to registers (to make displacements shorter)
1396 and use post-increment if available. */
1397 if (!(data.autinc_from && data.autinc_to)
1398 && move_by_pieces_ninsns (len, align) > 2)
1399 {
1400 /* Find the mode of the largest move... */
1401 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1402 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1403 if (GET_MODE_SIZE (tmode) < max_size)
1404 mode = tmode;
1405
1406 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1407 {
1408 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1409 data.autinc_from = 1;
1410 data.explicit_inc_from = -1;
1411 }
1412 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1413 {
1414 data.from_addr = copy_addr_to_reg (from_addr);
1415 data.autinc_from = 1;
1416 data.explicit_inc_from = 1;
1417 }
1418 if (!data.autinc_from && CONSTANT_P (from_addr))
1419 data.from_addr = copy_addr_to_reg (from_addr);
1420 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1421 {
1422 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1423 data.autinc_to = 1;
1424 data.explicit_inc_to = -1;
1425 }
1426 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1427 {
1428 data.to_addr = copy_addr_to_reg (to_addr);
1429 data.autinc_to = 1;
1430 data.explicit_inc_to = 1;
1431 }
1432 if (!data.autinc_to && CONSTANT_P (to_addr))
1433 data.to_addr = copy_addr_to_reg (to_addr);
1434 }
1435
1436 if (! SLOW_UNALIGNED_ACCESS
1437 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1438 align = MOVE_MAX;
1439
1440 /* First move what we can in the largest integer mode, then go to
1441 successively smaller modes. */
1442
1443 while (max_size > 1)
1444 {
1445 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1446 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1447 if (GET_MODE_SIZE (tmode) < max_size)
1448 mode = tmode;
1449
1450 if (mode == VOIDmode)
1451 break;
1452
1453 icode = mov_optab->handlers[(int) mode].insn_code;
1454 if (icode != CODE_FOR_nothing
1455 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1456 GET_MODE_SIZE (mode)))
1457 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1458
1459 max_size = GET_MODE_SIZE (mode);
1460 }
1461
1462 /* The code above should have handled everything. */
1463 if (data.len > 0)
1464 abort ();
1465 }
1466
1467 /* Return number of insns required to move L bytes by pieces.
1468 ALIGN (in bytes) is maximum alignment we can assume. */
1469
1470 static int
1471 move_by_pieces_ninsns (l, align)
1472 unsigned int l;
1473 int align;
1474 {
1475 register int n_insns = 0;
1476 int max_size = MOVE_MAX + 1;
1477
1478 if (! SLOW_UNALIGNED_ACCESS
1479 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1480 align = MOVE_MAX;
1481
1482 while (max_size > 1)
1483 {
1484 enum machine_mode mode = VOIDmode, tmode;
1485 enum insn_code icode;
1486
1487 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1488 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1489 if (GET_MODE_SIZE (tmode) < max_size)
1490 mode = tmode;
1491
1492 if (mode == VOIDmode)
1493 break;
1494
1495 icode = mov_optab->handlers[(int) mode].insn_code;
1496 if (icode != CODE_FOR_nothing
1497 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1498 GET_MODE_SIZE (mode)))
1499 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1500
1501 max_size = GET_MODE_SIZE (mode);
1502 }
1503
1504 return n_insns;
1505 }
1506
1507 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1508 with move instructions for mode MODE. GENFUN is the gen_... function
1509 to make a move insn for that mode. DATA has all the other info. */
1510
1511 static void
1512 move_by_pieces_1 (genfun, mode, data)
1513 rtx (*genfun) PROTO ((rtx, ...));
1514 enum machine_mode mode;
1515 struct move_by_pieces *data;
1516 {
1517 register int size = GET_MODE_SIZE (mode);
1518 register rtx to1, from1;
1519
1520 while (data->len >= size)
1521 {
1522 if (data->reverse) data->offset -= size;
1523
1524 to1 = (data->autinc_to
1525 ? gen_rtx_MEM (mode, data->to_addr)
1526 : copy_rtx (change_address (data->to, mode,
1527 plus_constant (data->to_addr,
1528 data->offset))));
1529 MEM_IN_STRUCT_P (to1) = data->to_struct;
1530 RTX_UNCHANGING_P (to1) = data->to_readonly;
1531
1532 from1
1533 = (data->autinc_from
1534 ? gen_rtx_MEM (mode, data->from_addr)
1535 : copy_rtx (change_address (data->from, mode,
1536 plus_constant (data->from_addr,
1537 data->offset))));
1538 MEM_IN_STRUCT_P (from1) = data->from_struct;
1539 RTX_UNCHANGING_P (from1) = data->from_readonly;
1540
1541 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1542 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1543 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1544 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1545
1546 emit_insn ((*genfun) (to1, from1));
1547 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1548 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1549 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1550 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1551
1552 if (! data->reverse) data->offset += size;
1553
1554 data->len -= size;
1555 }
1556 }
1557 \f
1558 /* Emit code to move a block Y to a block X.
1559 This may be done with string-move instructions,
1560 with multiple scalar move instructions, or with a library call.
1561
1562 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1563 with mode BLKmode.
1564 SIZE is an rtx that says how long they are.
1565 ALIGN is the maximum alignment we can assume they have,
1566 measured in bytes.
1567
1568 Return the address of the new block, if memcpy is called and returns it,
1569 0 otherwise. */
1570
1571 rtx
1572 emit_block_move (x, y, size, align)
1573 rtx x, y;
1574 rtx size;
1575 int align;
1576 {
1577 rtx retval = 0;
1578 #ifdef TARGET_MEM_FUNCTIONS
1579 static tree fn;
1580 tree call_expr, arg_list;
1581 #endif
1582
1583 if (GET_MODE (x) != BLKmode)
1584 abort ();
1585
1586 if (GET_MODE (y) != BLKmode)
1587 abort ();
1588
1589 x = protect_from_queue (x, 1);
1590 y = protect_from_queue (y, 0);
1591 size = protect_from_queue (size, 0);
1592
1593 if (GET_CODE (x) != MEM)
1594 abort ();
1595 if (GET_CODE (y) != MEM)
1596 abort ();
1597 if (size == 0)
1598 abort ();
1599
1600 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1601 move_by_pieces (x, y, INTVAL (size), align);
1602 else
1603 {
1604 /* Try the most limited insn first, because there's no point
1605 including more than one in the machine description unless
1606 the more limited one has some advantage. */
1607
1608 rtx opalign = GEN_INT (align);
1609 enum machine_mode mode;
1610
1611 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1612 mode = GET_MODE_WIDER_MODE (mode))
1613 {
1614 enum insn_code code = movstr_optab[(int) mode];
1615 insn_operand_predicate_fn pred;
1616
1617 if (code != CODE_FOR_nothing
1618 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1619 here because if SIZE is less than the mode mask, as it is
1620 returned by the macro, it will definitely be less than the
1621 actual mode mask. */
1622 && ((GET_CODE (size) == CONST_INT
1623 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1624 <= (GET_MODE_MASK (mode) >> 1)))
1625 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1626 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1627 || (*pred) (x, BLKmode))
1628 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1629 || (*pred) (y, BLKmode))
1630 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1631 || (*pred) (opalign, VOIDmode)))
1632 {
1633 rtx op2;
1634 rtx last = get_last_insn ();
1635 rtx pat;
1636
1637 op2 = convert_to_mode (mode, size, 1);
1638 pred = insn_data[(int) code].operand[2].predicate;
1639 if (pred != 0 && ! (*pred) (op2, mode))
1640 op2 = copy_to_mode_reg (mode, op2);
1641
1642 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1643 if (pat)
1644 {
1645 emit_insn (pat);
1646 return 0;
1647 }
1648 else
1649 delete_insns_since (last);
1650 }
1651 }
1652
1653 /* X, Y, or SIZE may have been passed through protect_from_queue.
1654
1655 It is unsafe to save the value generated by protect_from_queue
1656 and reuse it later. Consider what happens if emit_queue is
1657 called before the return value from protect_from_queue is used.
1658
1659 Expansion of the CALL_EXPR below will call emit_queue before
1660 we are finished emitting RTL for argument setup. So if we are
1661 not careful we could get the wrong value for an argument.
1662
1663 To avoid this problem we go ahead and emit code to copy X, Y &
1664 SIZE into new pseudos. We can then place those new pseudos
1665 into an RTL_EXPR and use them later, even after a call to
1666 emit_queue.
1667
1668 Note this is not strictly needed for library calls since they
1669 do not call emit_queue before loading their arguments. However,
1670 we may need to have library calls call emit_queue in the future
1671 since failing to do so could cause problems for targets which
1672 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1673 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1674 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1675
1676 #ifdef TARGET_MEM_FUNCTIONS
1677 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1678 #else
1679 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1680 TREE_UNSIGNED (integer_type_node));
1681 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1682 #endif
1683
1684 #ifdef TARGET_MEM_FUNCTIONS
1685 /* It is incorrect to use the libcall calling conventions to call
1686 memcpy in this context.
1687
1688 This could be a user call to memcpy and the user may wish to
1689 examine the return value from memcpy.
1690
1691 For targets where libcalls and normal calls have different conventions
1692 for returning pointers, we could end up generating incorrect code.
1693
1694 So instead of using a libcall sequence we build up a suitable
1695 CALL_EXPR and expand the call in the normal fashion. */
1696 if (fn == NULL_TREE)
1697 {
1698 tree fntype;
1699
1700 /* This was copied from except.c, I don't know if all this is
1701 necessary in this context or not. */
1702 fn = get_identifier ("memcpy");
1703 push_obstacks_nochange ();
1704 end_temporary_allocation ();
1705 fntype = build_pointer_type (void_type_node);
1706 fntype = build_function_type (fntype, NULL_TREE);
1707 fn = build_decl (FUNCTION_DECL, fn, fntype);
1708 ggc_add_tree_root (&fn, 1);
1709 DECL_EXTERNAL (fn) = 1;
1710 TREE_PUBLIC (fn) = 1;
1711 DECL_ARTIFICIAL (fn) = 1;
1712 make_decl_rtl (fn, NULL_PTR, 1);
1713 assemble_external (fn);
1714 pop_obstacks ();
1715 }
1716
1717 /* We need to make an argument list for the function call.
1718
1719 memcpy has three arguments, the first two are void * addresses and
1720 the last is a size_t byte count for the copy. */
1721 arg_list
1722 = build_tree_list (NULL_TREE,
1723 make_tree (build_pointer_type (void_type_node), x));
1724 TREE_CHAIN (arg_list)
1725 = build_tree_list (NULL_TREE,
1726 make_tree (build_pointer_type (void_type_node), y));
1727 TREE_CHAIN (TREE_CHAIN (arg_list))
1728 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1729 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1730
1731 /* Now we have to build up the CALL_EXPR itself. */
1732 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1733 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1734 call_expr, arg_list, NULL_TREE);
1735 TREE_SIDE_EFFECTS (call_expr) = 1;
1736
1737 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1738 #else
1739 emit_library_call (bcopy_libfunc, 0,
1740 VOIDmode, 3, y, Pmode, x, Pmode,
1741 convert_to_mode (TYPE_MODE (integer_type_node), size,
1742 TREE_UNSIGNED (integer_type_node)),
1743 TYPE_MODE (integer_type_node));
1744 #endif
1745 }
1746
1747 return retval;
1748 }
1749 \f
1750 /* Copy all or part of a value X into registers starting at REGNO.
1751 The number of registers to be filled is NREGS. */
1752
1753 void
1754 move_block_to_reg (regno, x, nregs, mode)
1755 int regno;
1756 rtx x;
1757 int nregs;
1758 enum machine_mode mode;
1759 {
1760 int i;
1761 #ifdef HAVE_load_multiple
1762 rtx pat;
1763 rtx last;
1764 #endif
1765
1766 if (nregs == 0)
1767 return;
1768
1769 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1770 x = validize_mem (force_const_mem (mode, x));
1771
1772 /* See if the machine can do this with a load multiple insn. */
1773 #ifdef HAVE_load_multiple
1774 if (HAVE_load_multiple)
1775 {
1776 last = get_last_insn ();
1777 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1778 GEN_INT (nregs));
1779 if (pat)
1780 {
1781 emit_insn (pat);
1782 return;
1783 }
1784 else
1785 delete_insns_since (last);
1786 }
1787 #endif
1788
1789 for (i = 0; i < nregs; i++)
1790 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1791 operand_subword_force (x, i, mode));
1792 }
1793
1794 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1795 The number of registers to be filled is NREGS. SIZE indicates the number
1796 of bytes in the object X. */
1797
1798
1799 void
1800 move_block_from_reg (regno, x, nregs, size)
1801 int regno;
1802 rtx x;
1803 int nregs;
1804 int size;
1805 {
1806 int i;
1807 #ifdef HAVE_store_multiple
1808 rtx pat;
1809 rtx last;
1810 #endif
1811 enum machine_mode mode;
1812
1813 /* If SIZE is that of a mode no bigger than a word, just use that
1814 mode's store operation. */
1815 if (size <= UNITS_PER_WORD
1816 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1817 {
1818 emit_move_insn (change_address (x, mode, NULL),
1819 gen_rtx_REG (mode, regno));
1820 return;
1821 }
1822
1823 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1824 to the left before storing to memory. Note that the previous test
1825 doesn't handle all cases (e.g. SIZE == 3). */
1826 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1827 {
1828 rtx tem = operand_subword (x, 0, 1, BLKmode);
1829 rtx shift;
1830
1831 if (tem == 0)
1832 abort ();
1833
1834 shift = expand_shift (LSHIFT_EXPR, word_mode,
1835 gen_rtx_REG (word_mode, regno),
1836 build_int_2 ((UNITS_PER_WORD - size)
1837 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1838 emit_move_insn (tem, shift);
1839 return;
1840 }
1841
1842 /* See if the machine can do this with a store multiple insn. */
1843 #ifdef HAVE_store_multiple
1844 if (HAVE_store_multiple)
1845 {
1846 last = get_last_insn ();
1847 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1848 GEN_INT (nregs));
1849 if (pat)
1850 {
1851 emit_insn (pat);
1852 return;
1853 }
1854 else
1855 delete_insns_since (last);
1856 }
1857 #endif
1858
1859 for (i = 0; i < nregs; i++)
1860 {
1861 rtx tem = operand_subword (x, i, 1, BLKmode);
1862
1863 if (tem == 0)
1864 abort ();
1865
1866 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1867 }
1868 }
1869
1870 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1871 registers represented by a PARALLEL. SSIZE represents the total size of
1872 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1873 SRC in bits. */
1874 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1875 the balance will be in what would be the low-order memory addresses, i.e.
1876 left justified for big endian, right justified for little endian. This
1877 happens to be true for the targets currently using this support. If this
1878 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1879 would be needed. */
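/* A PARALLEL destination has the form (with hypothetical registers)
   (parallel [(expr_list (reg:DI 3) (const_int 0))
              (expr_list (reg:DI 4) (const_int 8))])
   where each element pairs a destination register with the byte offset of
   the piece of SRC it is to receive. */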
1880
1881 void
1882 emit_group_load (dst, orig_src, ssize, align)
1883 rtx dst, orig_src;
1884 int align, ssize;
1885 {
1886 rtx *tmps, src;
1887 int start, i;
1888
1889 if (GET_CODE (dst) != PARALLEL)
1890 abort ();
1891
1892 /* Check for a NULL entry, used to indicate that the parameter goes
1893 both on the stack and in registers. */
1894 if (XEXP (XVECEXP (dst, 0, 0), 0))
1895 start = 0;
1896 else
1897 start = 1;
1898
1899 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1900
1901 /* If we won't be loading directly from memory, protect the real source
1902 from strange tricks we might play. */
1903 src = orig_src;
1904 if (GET_CODE (src) != MEM)
1905 {
1906 src = gen_reg_rtx (GET_MODE (orig_src));
1907 emit_move_insn (src, orig_src);
1908 }
1909
1910 /* Process the pieces. */
1911 for (i = start; i < XVECLEN (dst, 0); i++)
1912 {
1913 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1914 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1915 int bytelen = GET_MODE_SIZE (mode);
1916 int shift = 0;
1917
1918 /* Handle trailing fragments that run over the size of the struct. */
1919 if (ssize >= 0 && bytepos + bytelen > ssize)
1920 {
1921 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1922 bytelen = ssize - bytepos;
1923 if (bytelen <= 0)
1924 abort();
1925 }
1926
1927 /* Optimize the access just a bit. */
1928 if (GET_CODE (src) == MEM
1929 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1930 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1931 && bytelen == GET_MODE_SIZE (mode))
1932 {
1933 tmps[i] = gen_reg_rtx (mode);
1934 emit_move_insn (tmps[i],
1935 change_address (src, mode,
1936 plus_constant (XEXP (src, 0),
1937 bytepos)));
1938 }
1939 else if (GET_CODE (src) == CONCAT)
1940 {
1941 if (bytepos == 0
1942 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1943 tmps[i] = XEXP (src, 0);
1944 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1945 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1946 tmps[i] = XEXP (src, 1);
1947 else
1948 abort ();
1949 }
1950 else
1951 {
1952 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1953 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1954 mode, mode, align, ssize);
1955 }
1956
1957 if (BYTES_BIG_ENDIAN && shift)
1958 {
1959 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1960 tmps[i], 0, OPTAB_WIDEN);
1961 }
1962 }
1963 emit_queue();
1964
1965 /* Copy the extracted pieces into the proper (probable) hard regs. */
1966 for (i = start; i < XVECLEN (dst, 0); i++)
1967 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1968 }
1969
1970 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1971 registers represented by a PARALLEL. SSIZE represents the total size of
1972 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1973
1974 void
1975 emit_group_store (orig_dst, src, ssize, align)
1976 rtx orig_dst, src;
1977 int ssize, align;
1978 {
1979 rtx *tmps, dst;
1980 int start, i;
1981
1982 if (GET_CODE (src) != PARALLEL)
1983 abort ();
1984
1985 /* Check for a NULL entry, used to indicate that the parameter goes
1986 both on the stack and in registers. */
1987 if (XEXP (XVECEXP (src, 0, 0), 0))
1988 start = 0;
1989 else
1990 start = 1;
1991
1992 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
1993
1994 /* Copy the (probable) hard regs into pseudos. */
1995 for (i = start; i < XVECLEN (src, 0); i++)
1996 {
1997 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1998 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1999 emit_move_insn (tmps[i], reg);
2000 }
2001 emit_queue();
2002
2003 /* If we won't be storing directly into memory, protect the real destination
2004 from strange tricks we might play. */
2005 dst = orig_dst;
2006 if (GET_CODE (dst) == PARALLEL)
2007 {
2008 rtx temp;
2009
2010 /* We can get a PARALLEL dst if there is a conditional expression in
2011 a return statement. In that case, the dst and src are the same,
2012 so no action is necessary. */
2013 if (rtx_equal_p (dst, src))
2014 return;
2015
2016 /* It is unclear if we can ever reach here, but we may as well handle
2017 it. Allocate a temporary, and split this into a store/load to/from
2018 the temporary. */
2019
2020 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2021 emit_group_store (temp, src, ssize, align);
2022 emit_group_load (dst, temp, ssize, align);
2023 return;
2024 }
2025 else if (GET_CODE (dst) != MEM)
2026 {
2027 dst = gen_reg_rtx (GET_MODE (orig_dst));
2028 /* Make life a bit easier for combine. */
2029 emit_move_insn (dst, const0_rtx);
2030 }
2031 else if (! MEM_IN_STRUCT_P (dst))
2032 {
2033 /* store_bit_field requires that memory operations have
2034 mem_in_struct_p set; we might not. */
2035
2036 dst = copy_rtx (orig_dst);
2037 MEM_SET_IN_STRUCT_P (dst, 1);
2038 }
2039
2040 /* Process the pieces. */
2041 for (i = start; i < XVECLEN (src, 0); i++)
2042 {
2043 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2044 enum machine_mode mode = GET_MODE (tmps[i]);
2045 int bytelen = GET_MODE_SIZE (mode);
2046
2047 /* Handle trailing fragments that run over the size of the struct. */
2048 if (ssize >= 0 && bytepos + bytelen > ssize)
2049 {
2050 if (BYTES_BIG_ENDIAN)
2051 {
2052 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2053 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2054 tmps[i], 0, OPTAB_WIDEN);
2055 }
2056 bytelen = ssize - bytepos;
2057 }
2058
2059 /* Optimize the access just a bit. */
2060 if (GET_CODE (dst) == MEM
2061 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2062 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2063 && bytelen == GET_MODE_SIZE (mode))
2064 {
2065 emit_move_insn (change_address (dst, mode,
2066 plus_constant (XEXP (dst, 0),
2067 bytepos)),
2068 tmps[i]);
2069 }
2070 else
2071 {
2072 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2073 mode, tmps[i], align, ssize);
2074 }
2075 }
2076 emit_queue();
2077
2078 /* Copy from the pseudo into the (probable) hard reg. */
2079 if (GET_CODE (dst) == REG)
2080 emit_move_insn (orig_dst, dst);
2081 }
2082
2083 /* Generate code to copy a BLKmode object of TYPE out of a
2084 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2085 is null, a stack temporary is created. TGTBLK is returned.
2086
2087 The primary purpose of this routine is to handle functions
2088 that return BLKmode structures in registers. Some machines
2089 (the PA for example) want to return all small structures
2090 in registers regardless of the structure's alignment.
2091 */
2092
2093 rtx
2094 copy_blkmode_from_reg(tgtblk,srcreg,type)
2095 rtx tgtblk;
2096 rtx srcreg;
2097 tree type;
2098 {
2099 int bytes = int_size_in_bytes (type);
2100 rtx src = NULL, dst = NULL;
2101 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2102 int bitpos, xbitpos, big_endian_correction = 0;
2103
2104 if (tgtblk == 0)
2105 {
2106 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2107 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2108 preserve_temp_slots (tgtblk);
2109 }
2110
2111 /* This code assumes srcreg is at least a full word. If it isn't,
2112 copy it into a new pseudo which is a full word. */
2113 if (GET_MODE (srcreg) != BLKmode
2114 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2115 srcreg = convert_to_mode (word_mode, srcreg,
2116 TREE_UNSIGNED (type));
2117
2118 /* Structures whose size is not a multiple of a word are aligned
2119 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2120 machine, this means we must skip the empty high order bytes when
2121 calculating the bit offset. */
2122 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2123 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2124 * BITS_PER_UNIT));
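  /* For example, with bytes == 3 on a 32-bit big-endian target
     (BITS_PER_WORD == 32), the correction is 32 - 3 * 8 = 8 bits, so
     the unused high-order byte of the register is skipped and the loop
     below starts extracting at bit 8 of SRCREG.  */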
2125
2126   /* Copy the structure BITSIZE bits at a time.
2127
2128 We could probably emit more efficient code for machines
2129 which do not use strict alignment, but it doesn't seem
2130 worth the effort at the current time. */
2131 for (bitpos = 0, xbitpos = big_endian_correction;
2132 bitpos < bytes * BITS_PER_UNIT;
2133 bitpos += bitsize, xbitpos += bitsize)
2134 {
2135
2136 /* We need a new source operand each time xbitpos is on a
2137 word boundary and when xbitpos == big_endian_correction
2138 (the first time through). */
2139 if (xbitpos % BITS_PER_WORD == 0
2140 || xbitpos == big_endian_correction)
2141 src = operand_subword_force (srcreg,
2142 xbitpos / BITS_PER_WORD,
2143 BLKmode);
2144
2145 /* We need a new destination operand each time bitpos is on
2146 a word boundary. */
2147 if (bitpos % BITS_PER_WORD == 0)
2148 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2149
2150 /* Use xbitpos for the source extraction (right justified) and
2151 	 bitpos for the destination store (left justified).  */
2152 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2153 extract_bit_field (src, bitsize,
2154 xbitpos % BITS_PER_WORD, 1,
2155 NULL_RTX, word_mode,
2156 word_mode,
2157 bitsize / BITS_PER_UNIT,
2158 BITS_PER_WORD),
2159 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2160 }
2161 return tgtblk;
2162 }
2163
2164
2165 /* Add a USE expression for REG to the (possibly empty) list pointed
2166 to by CALL_FUSAGE. REG must denote a hard register. */
2167
2168 void
2169 use_reg (call_fusage, reg)
2170 rtx *call_fusage, reg;
2171 {
2172 if (GET_CODE (reg) != REG
2173 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2174 abort();
2175
2176 *call_fusage
2177 = gen_rtx_EXPR_LIST (VOIDmode,
2178 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2179 }
2180
2181 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2182 starting at REGNO. All of these registers must be hard registers. */
2183
2184 void
2185 use_regs (call_fusage, regno, nregs)
2186 rtx *call_fusage;
2187 int regno;
2188 int nregs;
2189 {
2190 int i;
2191
2192 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2193 abort ();
2194
2195 for (i = 0; i < nregs; i++)
2196 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2197 }
2198
2199 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2200 PARALLEL REGS. This is for calls that pass values in multiple
2201 non-contiguous locations. The Irix 6 ABI has examples of this. */
2202
2203 void
2204 use_group_regs (call_fusage, regs)
2205 rtx *call_fusage;
2206 rtx regs;
2207 {
2208 int i;
2209
2210 for (i = 0; i < XVECLEN (regs, 0); i++)
2211 {
2212 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2213
2214 /* A NULL entry means the parameter goes both on the stack and in
2215 registers. This can also be a MEM for targets that pass values
2216 partially on the stack and partially in registers. */
2217 if (reg != 0 && GET_CODE (reg) == REG)
2218 use_reg (call_fusage, reg);
2219 }
2220 }
2221 \f
2222 /* Generate several move instructions to clear LEN bytes of block TO.
2223 (A MEM rtx with BLKmode). The caller must pass TO through
2224 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2225 we can assume. */
2226
2227 static void
2228 clear_by_pieces (to, len, align)
2229 rtx to;
2230 int len, align;
2231 {
2232 struct clear_by_pieces data;
2233 rtx to_addr = XEXP (to, 0);
2234 int max_size = MOVE_MAX_PIECES + 1;
2235 enum machine_mode mode = VOIDmode, tmode;
2236 enum insn_code icode;
2237
2238 data.offset = 0;
2239 data.to_addr = to_addr;
2240 data.to = to;
2241 data.autinc_to
2242 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2243 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2244
2245 data.explicit_inc_to = 0;
2246 data.reverse
2247 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2248 if (data.reverse) data.offset = len;
2249 data.len = len;
2250
2251 data.to_struct = MEM_IN_STRUCT_P (to);
2252
2253   /* If clearing requires more than two move insns,
2254      copy the address to a register (to make displacements shorter)
2255 and use post-increment if available. */
2256 if (!data.autinc_to
2257 && move_by_pieces_ninsns (len, align) > 2)
2258 {
2259 /* Determine the main mode we'll be using */
2260 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2261 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2262 if (GET_MODE_SIZE (tmode) < max_size)
2263 mode = tmode;
2264
2265 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2266 {
2267 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2268 data.autinc_to = 1;
2269 data.explicit_inc_to = -1;
2270 }
2271 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2272 {
2273 data.to_addr = copy_addr_to_reg (to_addr);
2274 data.autinc_to = 1;
2275 data.explicit_inc_to = 1;
2276 }
2277 if (!data.autinc_to && CONSTANT_P (to_addr))
2278 data.to_addr = copy_addr_to_reg (to_addr);
2279 }
2280
2281 if (! SLOW_UNALIGNED_ACCESS
2282 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2283 align = MOVE_MAX;
2284
2285 /* First move what we can in the largest integer mode, then go to
2286 successively smaller modes. */
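  /* For instance, assuming a 32-bit target with MOVE_MAX_PIECES == 4 and
     sufficient alignment, clearing 7 bytes becomes one SImode store,
     then one HImode store, then one QImode store.  */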
2287
2288 while (max_size > 1)
2289 {
2290 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2291 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2292 if (GET_MODE_SIZE (tmode) < max_size)
2293 mode = tmode;
2294
2295 if (mode == VOIDmode)
2296 break;
2297
2298 icode = mov_optab->handlers[(int) mode].insn_code;
2299 if (icode != CODE_FOR_nothing
2300 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2301 GET_MODE_SIZE (mode)))
2302 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2303
2304 max_size = GET_MODE_SIZE (mode);
2305 }
2306
2307 /* The code above should have handled everything. */
2308 if (data.len != 0)
2309 abort ();
2310 }
2311
2312 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2313 with move instructions for mode MODE. GENFUN is the gen_... function
2314 to make a move insn for that mode. DATA has all the other info. */
2315
2316 static void
2317 clear_by_pieces_1 (genfun, mode, data)
2318 rtx (*genfun) PROTO ((rtx, ...));
2319 enum machine_mode mode;
2320 struct clear_by_pieces *data;
2321 {
2322 register int size = GET_MODE_SIZE (mode);
2323 register rtx to1;
2324
2325 while (data->len >= size)
2326 {
2327 if (data->reverse) data->offset -= size;
2328
2329 to1 = (data->autinc_to
2330 ? gen_rtx_MEM (mode, data->to_addr)
2331 : copy_rtx (change_address (data->to, mode,
2332 plus_constant (data->to_addr,
2333 data->offset))));
2334 MEM_IN_STRUCT_P (to1) = data->to_struct;
2335
2336 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2337 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2338
2339 emit_insn ((*genfun) (to1, const0_rtx));
2340 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2341 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2342
2343 if (! data->reverse) data->offset += size;
2344
2345 data->len -= size;
2346 }
2347 }
2348 \f
2349 /* Write zeros through the storage of OBJECT.
2350 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2351    the maximum alignment we can assume it has, measured in bytes.
2352
2353 If we call a function that returns the length of the block, return it. */
2354
2355 rtx
2356 clear_storage (object, size, align)
2357 rtx object;
2358 rtx size;
2359 int align;
2360 {
2361 #ifdef TARGET_MEM_FUNCTIONS
2362 static tree fn;
2363 tree call_expr, arg_list;
2364 #endif
2365 rtx retval = 0;
2366
2367 if (GET_MODE (object) == BLKmode)
2368 {
2369 object = protect_from_queue (object, 1);
2370 size = protect_from_queue (size, 0);
2371
2372 if (GET_CODE (size) == CONST_INT
2373 && MOVE_BY_PIECES_P (INTVAL (size), align))
2374 clear_by_pieces (object, INTVAL (size), align);
2375
2376 else
2377 {
2378 /* Try the most limited insn first, because there's no point
2379 including more than one in the machine description unless
2380 the more limited one has some advantage. */
2381
2382 rtx opalign = GEN_INT (align);
2383 enum machine_mode mode;
2384
2385 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2386 mode = GET_MODE_WIDER_MODE (mode))
2387 {
2388 enum insn_code code = clrstr_optab[(int) mode];
2389 insn_operand_predicate_fn pred;
2390
2391 if (code != CODE_FOR_nothing
2392 /* We don't need MODE to be narrower than
2393 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2394 the mode mask, as it is returned by the macro, it will
2395 definitely be less than the actual mode mask. */
2396 && ((GET_CODE (size) == CONST_INT
2397 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2398 <= (GET_MODE_MASK (mode) >> 1)))
2399 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2400 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2401 || (*pred) (object, BLKmode))
2402 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2403 || (*pred) (opalign, VOIDmode)))
2404 {
2405 rtx op1;
2406 rtx last = get_last_insn ();
2407 rtx pat;
2408
2409 op1 = convert_to_mode (mode, size, 1);
2410 pred = insn_data[(int) code].operand[1].predicate;
2411 if (pred != 0 && ! (*pred) (op1, mode))
2412 op1 = copy_to_mode_reg (mode, op1);
2413
2414 pat = GEN_FCN ((int) code) (object, op1, opalign);
2415 if (pat)
2416 {
2417 emit_insn (pat);
2418 return 0;
2419 }
2420 else
2421 delete_insns_since (last);
2422 }
2423 }
2424
2425 /* OBJECT or SIZE may have been passed through protect_from_queue.
2426
2427 It is unsafe to save the value generated by protect_from_queue
2428 and reuse it later. Consider what happens if emit_queue is
2429 called before the return value from protect_from_queue is used.
2430
2431 Expansion of the CALL_EXPR below will call emit_queue before
2432 we are finished emitting RTL for argument setup. So if we are
2433 not careful we could get the wrong value for an argument.
2434
2435 To avoid this problem we go ahead and emit code to copy OBJECT
2436 and SIZE into new pseudos. We can then place those new pseudos
2437 into an RTL_EXPR and use them later, even after a call to
2438 emit_queue.
2439
2440 Note this is not strictly needed for library calls since they
2441 do not call emit_queue before loading their arguments. However,
2442 we may need to have library calls call emit_queue in the future
2443 since failing to do so could cause problems for targets which
2444 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2445 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2446
2447 #ifdef TARGET_MEM_FUNCTIONS
2448 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2449 #else
2450 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2451 TREE_UNSIGNED (integer_type_node));
2452 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2453 #endif
2454
2455
2456 #ifdef TARGET_MEM_FUNCTIONS
2457 /* It is incorrect to use the libcall calling conventions to call
2458 memset in this context.
2459
2460 This could be a user call to memset and the user may wish to
2461 examine the return value from memset.
2462
2463 For targets where libcalls and normal calls have different
2464 conventions for returning pointers, we could end up generating
2465 incorrect code.
2466
2467 So instead of using a libcall sequence we build up a suitable
2468 CALL_EXPR and expand the call in the normal fashion. */
2469 if (fn == NULL_TREE)
2470 {
2471 tree fntype;
2472
2473 	  /* This was copied from except.c; I don't know if all this is
2474 necessary in this context or not. */
2475 fn = get_identifier ("memset");
2476 push_obstacks_nochange ();
2477 end_temporary_allocation ();
2478 fntype = build_pointer_type (void_type_node);
2479 fntype = build_function_type (fntype, NULL_TREE);
2480 fn = build_decl (FUNCTION_DECL, fn, fntype);
2481 ggc_add_tree_root (&fn, 1);
2482 DECL_EXTERNAL (fn) = 1;
2483 TREE_PUBLIC (fn) = 1;
2484 DECL_ARTIFICIAL (fn) = 1;
2485 make_decl_rtl (fn, NULL_PTR, 1);
2486 assemble_external (fn);
2487 pop_obstacks ();
2488 }
2489
2490 /* We need to make an argument list for the function call.
2491
2492 	 memset has three arguments: the first is a void * address, the
2493 	 second an integer with the initialization value, and the last is
2494 	 a size_t byte count for the copy.  */
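      /* In C terms, the call being constructed here is simply
	 memset (object, 0, size).  */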
2495 arg_list
2496 = build_tree_list (NULL_TREE,
2497 make_tree (build_pointer_type (void_type_node),
2498 object));
2499 TREE_CHAIN (arg_list)
2500 = build_tree_list (NULL_TREE,
2501 make_tree (integer_type_node, const0_rtx));
2502 TREE_CHAIN (TREE_CHAIN (arg_list))
2503 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2504 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2505
2506 /* Now we have to build up the CALL_EXPR itself. */
2507 call_expr = build1 (ADDR_EXPR,
2508 build_pointer_type (TREE_TYPE (fn)), fn);
2509 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2510 call_expr, arg_list, NULL_TREE);
2511 TREE_SIDE_EFFECTS (call_expr) = 1;
2512
2513 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2514 #else
2515 emit_library_call (bzero_libfunc, 0,
2516 VOIDmode, 2, object, Pmode, size,
2517 TYPE_MODE (integer_type_node));
2518 #endif
2519 }
2520 }
2521 else
2522 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2523
2524 return retval;
2525 }
2526
2527 /* Generate code to copy Y into X.
2528 Both Y and X must have the same mode, except that
2529 Y can be a constant with VOIDmode.
2530 This mode cannot be BLKmode; use emit_block_move for that.
2531
2532 Return the last instruction emitted. */
2533
2534 rtx
2535 emit_move_insn (x, y)
2536 rtx x, y;
2537 {
2538 enum machine_mode mode = GET_MODE (x);
2539
2540 x = protect_from_queue (x, 1);
2541 y = protect_from_queue (y, 0);
2542
2543 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2544 abort ();
2545
2546 /* Never force constant_p_rtx to memory. */
2547 if (GET_CODE (y) == CONSTANT_P_RTX)
2548 ;
2549 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2550 y = force_const_mem (mode, y);
2551
2552 /* If X or Y are memory references, verify that their addresses are valid
2553 for the machine. */
2554 if (GET_CODE (x) == MEM
2555 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2556 && ! push_operand (x, GET_MODE (x)))
2557 || (flag_force_addr
2558 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2559 x = change_address (x, VOIDmode, XEXP (x, 0));
2560
2561 if (GET_CODE (y) == MEM
2562 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2563 || (flag_force_addr
2564 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2565 y = change_address (y, VOIDmode, XEXP (y, 0));
2566
2567 if (mode == BLKmode)
2568 abort ();
2569
2570 return emit_move_insn_1 (x, y);
2571 }
2572
2573 /* Low level part of emit_move_insn.
2574 Called just like emit_move_insn, but assumes X and Y
2575 are basically valid. */
2576
2577 rtx
2578 emit_move_insn_1 (x, y)
2579 rtx x, y;
2580 {
2581 enum machine_mode mode = GET_MODE (x);
2582 enum machine_mode submode;
2583 enum mode_class class = GET_MODE_CLASS (mode);
2584 int i;
2585
2586 if (mode >= MAX_MACHINE_MODE)
2587 abort ();
2588
2589 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2590 return
2591 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2592
2593 /* Expand complex moves by moving real part and imag part, if possible. */
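  /* For example, a DCmode (complex double) move is split into two DFmode
     moves of the real and imaginary halves, provided a DFmode move
     pattern exists.  */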
2594 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2595 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2596 * BITS_PER_UNIT),
2597 (class == MODE_COMPLEX_INT
2598 ? MODE_INT : MODE_FLOAT),
2599 0))
2600 && (mov_optab->handlers[(int) submode].insn_code
2601 != CODE_FOR_nothing))
2602 {
2603 /* Don't split destination if it is a stack push. */
2604 int stack = push_operand (x, GET_MODE (x));
2605
2606       /* If this is a stack push, push the highpart first, so it
2607 will be in the argument order.
2608
2609 In that case, change_address is used only to convert
2610 the mode, not to change the address. */
2611 if (stack)
2612 {
2613 /* Note that the real part always precedes the imag part in memory
2614 regardless of machine's endianness. */
2615 #ifdef STACK_GROWS_DOWNWARD
2616 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2617 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2618 gen_imagpart (submode, y)));
2619 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2620 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2621 gen_realpart (submode, y)));
2622 #else
2623 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2624 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2625 gen_realpart (submode, y)));
2626 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2627 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2628 gen_imagpart (submode, y)));
2629 #endif
2630 }
2631 else
2632 {
2633 /* If this is a complex value with each part being smaller than a
2634 word, the usual calling sequence will likely pack the pieces into
2635 a single register. Unfortunately, SUBREG of hard registers only
2636 deals in terms of words, so we have a problem converting input
2637 arguments to the CONCAT of two registers that is used elsewhere
2638 for complex values. If this is before reload, we can copy it into
2639 memory and reload. FIXME, we should see about using extract and
2640 insert on integer registers, but complex short and complex char
2641 variables should be rarely used. */
2642 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2643 && (reload_in_progress | reload_completed) == 0)
2644 {
2645 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2646 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2647
2648 if (packed_dest_p || packed_src_p)
2649 {
2650 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2651 ? MODE_FLOAT : MODE_INT);
2652
2653 enum machine_mode reg_mode =
2654 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2655
2656 if (reg_mode != BLKmode)
2657 {
2658 rtx mem = assign_stack_temp (reg_mode,
2659 GET_MODE_SIZE (mode), 0);
2660
2661 rtx cmem = change_address (mem, mode, NULL_RTX);
2662
2663 current_function->cannot_inline
2664 = "function uses short complex types";
2665
2666 if (packed_dest_p)
2667 {
2668 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2669 emit_move_insn_1 (cmem, y);
2670 return emit_move_insn_1 (sreg, mem);
2671 }
2672 else
2673 {
2674 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2675 emit_move_insn_1 (mem, sreg);
2676 return emit_move_insn_1 (x, cmem);
2677 }
2678 }
2679 }
2680 }
2681
2682 /* Show the output dies here. This is necessary for pseudos;
2683 hard regs shouldn't appear here except as return values.
2684 We never want to emit such a clobber after reload. */
2685 if (x != y
2686 && ! (reload_in_progress || reload_completed))
2687 {
2688 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2689 }
2690
2691 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2692 (gen_realpart (submode, x), gen_realpart (submode, y)));
2693 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2694 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2695 }
2696
2697 return get_last_insn ();
2698 }
2699
2700 /* This will handle any multi-word mode that lacks a move_insn pattern.
2701 However, you will get better code if you define such patterns,
2702 even if they must turn into multiple assembler instructions. */
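  /* For instance, on a 32-bit target with no movdi pattern, a DImode
     move falls through to here and is emitted as two SImode word
     moves.  */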
2703 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2704 {
2705 rtx last_insn = 0;
2706
2707 #ifdef PUSH_ROUNDING
2708
2709 /* If X is a push on the stack, do the push now and replace
2710 X with a reference to the stack pointer. */
2711 if (push_operand (x, GET_MODE (x)))
2712 {
2713 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2714 x = change_address (x, VOIDmode, stack_pointer_rtx);
2715 }
2716 #endif
2717
2718 /* Show the output dies here. This is necessary for pseudos;
2719 hard regs shouldn't appear here except as return values.
2720 We never want to emit such a clobber after reload. */
2721 if (x != y
2722 && ! (reload_in_progress || reload_completed))
2723 {
2724 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2725 }
2726
2727 for (i = 0;
2728 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2729 i++)
2730 {
2731 rtx xpart = operand_subword (x, i, 1, mode);
2732 rtx ypart = operand_subword (y, i, 1, mode);
2733
2734 /* If we can't get a part of Y, put Y into memory if it is a
2735 constant. Otherwise, force it into a register. If we still
2736 can't get a part of Y, abort. */
2737 if (ypart == 0 && CONSTANT_P (y))
2738 {
2739 y = force_const_mem (mode, y);
2740 ypart = operand_subword (y, i, 1, mode);
2741 }
2742 else if (ypart == 0)
2743 ypart = operand_subword_force (y, i, mode);
2744
2745 if (xpart == 0 || ypart == 0)
2746 abort ();
2747
2748 last_insn = emit_move_insn (xpart, ypart);
2749 }
2750
2751 return last_insn;
2752 }
2753 else
2754 abort ();
2755 }
2756 \f
2757 /* Pushing data onto the stack. */
2758
2759 /* Push a block of length SIZE (perhaps variable)
2760 and return an rtx to address the beginning of the block.
2761 Note that it is not possible for the value returned to be a QUEUED.
2762 The value may be virtual_outgoing_args_rtx.
2763
2764 EXTRA is the number of bytes of padding to push in addition to SIZE.
2765 BELOW nonzero means this padding comes at low addresses;
2766 otherwise, the padding comes at high addresses. */
2767
2768 rtx
2769 push_block (size, extra, below)
2770 rtx size;
2771 int extra, below;
2772 {
2773 register rtx temp;
2774
2775 size = convert_modes (Pmode, ptr_mode, size, 1);
2776 if (CONSTANT_P (size))
2777 anti_adjust_stack (plus_constant (size, extra));
2778 else if (GET_CODE (size) == REG && extra == 0)
2779 anti_adjust_stack (size);
2780 else
2781 {
2782 rtx temp = copy_to_mode_reg (Pmode, size);
2783 if (extra != 0)
2784 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2785 temp, 0, OPTAB_LIB_WIDEN);
2786 anti_adjust_stack (temp);
2787 }
2788
2789 #if defined (STACK_GROWS_DOWNWARD) \
2790 || (defined (ARGS_GROW_DOWNWARD) \
2791 && !defined (ACCUMULATE_OUTGOING_ARGS))
2792
2793 /* Return the lowest stack address when STACK or ARGS grow downward and
2794      we are not accumulating outgoing arguments (the c4x port uses such
2795 conventions). */
2796 temp = virtual_outgoing_args_rtx;
2797 if (extra != 0 && below)
2798 temp = plus_constant (temp, extra);
2799 #else
2800 if (GET_CODE (size) == CONST_INT)
2801 temp = plus_constant (virtual_outgoing_args_rtx,
2802 - INTVAL (size) - (below ? 0 : extra));
2803 else if (extra != 0 && !below)
2804 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2805 negate_rtx (Pmode, plus_constant (size, extra)));
2806 else
2807 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2808 negate_rtx (Pmode, size));
2809 #endif
2810
2811 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2812 }
2813
2814 rtx
2815 gen_push_operand ()
2816 {
2817 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2818 }
2819
2820 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2821 block of SIZE bytes. */
2822
2823 static rtx
2824 get_push_address (size)
2825 int size;
2826 {
2827 register rtx temp;
2828
2829 if (STACK_PUSH_CODE == POST_DEC)
2830 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2831 else if (STACK_PUSH_CODE == POST_INC)
2832 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2833 else
2834 temp = stack_pointer_rtx;
2835
2836 return copy_to_reg (temp);
2837 }
2838
2839 /* Generate code to push X onto the stack, assuming it has mode MODE and
2840 type TYPE.
2841 MODE is redundant except when X is a CONST_INT (since they don't
2842 carry mode info).
2843 SIZE is an rtx for the size of data to be copied (in bytes),
2844 needed only if X is BLKmode.
2845
2846 ALIGN (in bytes) is maximum alignment we can assume.
2847
2848 If PARTIAL and REG are both nonzero, then copy that many of the first
2849 words of X into registers starting with REG, and push the rest of X.
2850 The amount of space pushed is decreased by PARTIAL words,
2851 rounded *down* to a multiple of PARM_BOUNDARY.
2852 REG must be a hard register in this case.
2853    If REG is zero but PARTIAL is not, take all other actions for an
2854 argument partially in registers, but do not actually load any
2855 registers.
2856
2857 EXTRA is the amount in bytes of extra space to leave next to this arg.
2858 This is ignored if an argument block has already been allocated.
2859
2860 On a machine that lacks real push insns, ARGS_ADDR is the address of
2861 the bottom of the argument block for this call. We use indexing off there
2862    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2863 argument block has not been preallocated.
2864
2865 ARGS_SO_FAR is the size of args previously pushed for this call.
2866
2867 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2868 for arguments passed in registers. If nonzero, it will be the number
2869 of bytes required. */
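/* For illustration: with PARTIAL == 2 on a 32-bit target, the first two
   words (8 bytes) of X are loaded into REG and the following register,
   and only the remainder of X is pushed onto the stack.  */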
2870
2871 void
2872 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2873 args_addr, args_so_far, reg_parm_stack_space,
2874 alignment_pad)
2875 register rtx x;
2876 enum machine_mode mode;
2877 tree type;
2878 rtx size;
2879 int align;
2880 int partial;
2881 rtx reg;
2882 int extra;
2883 rtx args_addr;
2884 rtx args_so_far;
2885 int reg_parm_stack_space;
2886 rtx alignment_pad;
2887 {
2888 rtx xinner;
2889 enum direction stack_direction
2890 #ifdef STACK_GROWS_DOWNWARD
2891 = downward;
2892 #else
2893 = upward;
2894 #endif
2895
2896 /* Decide where to pad the argument: `downward' for below,
2897 `upward' for above, or `none' for don't pad it.
2898 Default is below for small data on big-endian machines; else above. */
2899 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2900
2901 /* Invert direction if stack is post-update. */
2902 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2903 if (where_pad != none)
2904 where_pad = (where_pad == downward ? upward : downward);
2905
2906 xinner = x = protect_from_queue (x, 0);
2907
2908 if (mode == BLKmode)
2909 {
2910 /* Copy a block into the stack, entirely or partially. */
2911
2912 register rtx temp;
2913 int used = partial * UNITS_PER_WORD;
2914 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2915 int skip;
2916
2917 if (size == 0)
2918 abort ();
2919
2920 used -= offset;
2921
2922 /* USED is now the # of bytes we need not copy to the stack
2923 because registers will take care of them. */
2924
2925 if (partial != 0)
2926 xinner = change_address (xinner, BLKmode,
2927 plus_constant (XEXP (xinner, 0), used));
2928
2929 /* If the partial register-part of the arg counts in its stack size,
2930 skip the part of stack space corresponding to the registers.
2931 Otherwise, start copying to the beginning of the stack space,
2932 by setting SKIP to 0. */
2933 skip = (reg_parm_stack_space == 0) ? 0 : used;
2934
2935 #ifdef PUSH_ROUNDING
2936 /* Do it with several push insns if that doesn't take lots of insns
2937 and if there is no difficulty with push insns that skip bytes
2938 on the stack for alignment purposes. */
2939 if (args_addr == 0
2940 && GET_CODE (size) == CONST_INT
2941 && skip == 0
2942 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2943 /* Here we avoid the case of a structure whose weak alignment
2944 forces many pushes of a small amount of data,
2945 and such small pushes do rounding that causes trouble. */
2946 && ((! SLOW_UNALIGNED_ACCESS)
2947 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2948 || PUSH_ROUNDING (align) == align)
2949 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2950 {
2951 /* Push padding now if padding above and stack grows down,
2952 or if padding below and stack grows up.
2953 But if space already allocated, this has already been done. */
2954 if (extra && args_addr == 0
2955 && where_pad != none && where_pad != stack_direction)
2956 anti_adjust_stack (GEN_INT (extra));
2957
2958 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2959 INTVAL (size) - used, align);
2960
2961 if (current_function_check_memory_usage && ! in_check_memory_usage)
2962 {
2963 rtx temp;
2964
2965 in_check_memory_usage = 1;
2966 temp = get_push_address (INTVAL(size) - used);
2967 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2968 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2969 temp, Pmode,
2970 XEXP (xinner, 0), Pmode,
2971 GEN_INT (INTVAL(size) - used),
2972 TYPE_MODE (sizetype));
2973 else
2974 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2975 temp, Pmode,
2976 GEN_INT (INTVAL(size) - used),
2977 TYPE_MODE (sizetype),
2978 GEN_INT (MEMORY_USE_RW),
2979 TYPE_MODE (integer_type_node));
2980 in_check_memory_usage = 0;
2981 }
2982 }
2983 else
2984 #endif /* PUSH_ROUNDING */
2985 {
2986 /* Otherwise make space on the stack and copy the data
2987 to the address of that space. */
2988
2989 /* Deduct words put into registers from the size we must copy. */
2990 if (partial != 0)
2991 {
2992 if (GET_CODE (size) == CONST_INT)
2993 size = GEN_INT (INTVAL (size) - used);
2994 else
2995 size = expand_binop (GET_MODE (size), sub_optab, size,
2996 GEN_INT (used), NULL_RTX, 0,
2997 OPTAB_LIB_WIDEN);
2998 }
2999
3000 /* Get the address of the stack space.
3001 In this case, we do not deal with EXTRA separately.
3002 A single stack adjust will do. */
3003 if (! args_addr)
3004 {
3005 temp = push_block (size, extra, where_pad == downward);
3006 extra = 0;
3007 }
3008 else if (GET_CODE (args_so_far) == CONST_INT)
3009 temp = memory_address (BLKmode,
3010 plus_constant (args_addr,
3011 skip + INTVAL (args_so_far)));
3012 else
3013 temp = memory_address (BLKmode,
3014 plus_constant (gen_rtx_PLUS (Pmode,
3015 args_addr,
3016 args_so_far),
3017 skip));
3018 if (current_function_check_memory_usage && ! in_check_memory_usage)
3019 {
3020 rtx target;
3021
3022 in_check_memory_usage = 1;
3023 target = copy_to_reg (temp);
3024 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3025 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3026 target, Pmode,
3027 XEXP (xinner, 0), Pmode,
3028 size, TYPE_MODE (sizetype));
3029 else
3030 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3031 target, Pmode,
3032 size, TYPE_MODE (sizetype),
3033 GEN_INT (MEMORY_USE_RW),
3034 TYPE_MODE (integer_type_node));
3035 in_check_memory_usage = 0;
3036 }
3037
3038 /* TEMP is the address of the block. Copy the data there. */
3039 if (GET_CODE (size) == CONST_INT
3040 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
3041 {
3042 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3043 INTVAL (size), align);
3044 goto ret;
3045 }
3046 else
3047 {
3048 rtx opalign = GEN_INT (align);
3049 enum machine_mode mode;
3050 rtx target = gen_rtx_MEM (BLKmode, temp);
3051
3052 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3053 mode != VOIDmode;
3054 mode = GET_MODE_WIDER_MODE (mode))
3055 {
3056 enum insn_code code = movstr_optab[(int) mode];
3057 insn_operand_predicate_fn pred;
3058
3059 if (code != CODE_FOR_nothing
3060 && ((GET_CODE (size) == CONST_INT
3061 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3062 <= (GET_MODE_MASK (mode) >> 1)))
3063 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3064 && (!(pred = insn_data[(int) code].operand[0].predicate)
3065 || ((*pred) (target, BLKmode)))
3066 && (!(pred = insn_data[(int) code].operand[1].predicate)
3067 || ((*pred) (xinner, BLKmode)))
3068 && (!(pred = insn_data[(int) code].operand[3].predicate)
3069 || ((*pred) (opalign, VOIDmode))))
3070 {
3071 rtx op2 = convert_to_mode (mode, size, 1);
3072 rtx last = get_last_insn ();
3073 rtx pat;
3074
3075 pred = insn_data[(int) code].operand[2].predicate;
3076 if (pred != 0 && ! (*pred) (op2, mode))
3077 op2 = copy_to_mode_reg (mode, op2);
3078
3079 pat = GEN_FCN ((int) code) (target, xinner,
3080 op2, opalign);
3081 if (pat)
3082 {
3083 emit_insn (pat);
3084 goto ret;
3085 }
3086 else
3087 delete_insns_since (last);
3088 }
3089 }
3090 }
3091
3092 #ifndef ACCUMULATE_OUTGOING_ARGS
3093 /* If the source is referenced relative to the stack pointer,
3094 copy it to another register to stabilize it. We do not need
3095 to do this if we know that we won't be changing sp. */
3096
3097 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3098 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3099 temp = copy_to_reg (temp);
3100 #endif
3101
3102 /* Make inhibit_defer_pop nonzero around the library call
3103 to force it to pop the bcopy-arguments right away. */
3104 NO_DEFER_POP;
3105 #ifdef TARGET_MEM_FUNCTIONS
3106 emit_library_call (memcpy_libfunc, 0,
3107 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3108 convert_to_mode (TYPE_MODE (sizetype),
3109 size, TREE_UNSIGNED (sizetype)),
3110 TYPE_MODE (sizetype));
3111 #else
3112 emit_library_call (bcopy_libfunc, 0,
3113 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3114 convert_to_mode (TYPE_MODE (integer_type_node),
3115 size,
3116 TREE_UNSIGNED (integer_type_node)),
3117 TYPE_MODE (integer_type_node));
3118 #endif
3119 OK_DEFER_POP;
3120 }
3121 }
3122 else if (partial > 0)
3123 {
3124 /* Scalar partly in registers. */
3125
3126 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3127 int i;
3128 int not_stack;
3129 /* # words of start of argument
3130 that we must make space for but need not store. */
3131 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3132 int args_offset = INTVAL (args_so_far);
3133 int skip;
3134
3135 /* Push padding now if padding above and stack grows down,
3136 or if padding below and stack grows up.
3137 But if space already allocated, this has already been done. */
3138 if (extra && args_addr == 0
3139 && where_pad != none && where_pad != stack_direction)
3140 anti_adjust_stack (GEN_INT (extra));
3141
3142 /* If we make space by pushing it, we might as well push
3143 the real data. Otherwise, we can leave OFFSET nonzero
3144 and leave the space uninitialized. */
3145 if (args_addr == 0)
3146 offset = 0;
3147
3148 /* Now NOT_STACK gets the number of words that we don't need to
3149 allocate on the stack. */
3150 not_stack = partial - offset;
3151
3152 /* If the partial register-part of the arg counts in its stack size,
3153 skip the part of stack space corresponding to the registers.
3154 Otherwise, start copying to the beginning of the stack space,
3155 by setting SKIP to 0. */
3156 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3157
3158 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3159 x = validize_mem (force_const_mem (mode, x));
3160
3161 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3162 SUBREGs of such registers are not allowed. */
3163 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3164 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3165 x = copy_to_reg (x);
3166
3167 /* Loop over all the words allocated on the stack for this arg. */
3168 /* We can do it by words, because any scalar bigger than a word
3169      has a size that is a multiple of a word.  */
3170 #ifndef PUSH_ARGS_REVERSED
3171 for (i = not_stack; i < size; i++)
3172 #else
3173 for (i = size - 1; i >= not_stack; i--)
3174 #endif
3175 if (i >= not_stack + offset)
3176 emit_push_insn (operand_subword_force (x, i, mode),
3177 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3178 0, args_addr,
3179 GEN_INT (args_offset + ((i - not_stack + skip)
3180 * UNITS_PER_WORD)),
3181 reg_parm_stack_space, alignment_pad);
3182 }
3183 else
3184 {
3185 rtx addr;
3186 rtx target = NULL_RTX;
3187
3188 /* Push padding now if padding above and stack grows down,
3189 or if padding below and stack grows up.
3190 But if space already allocated, this has already been done. */
3191 if (extra && args_addr == 0
3192 && where_pad != none && where_pad != stack_direction)
3193 anti_adjust_stack (GEN_INT (extra));
3194
3195 #ifdef PUSH_ROUNDING
3196 if (args_addr == 0)
3197 addr = gen_push_operand ();
3198 else
3199 #endif
3200 {
3201 if (GET_CODE (args_so_far) == CONST_INT)
3202 addr
3203 = memory_address (mode,
3204 plus_constant (args_addr,
3205 INTVAL (args_so_far)));
3206 else
3207 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3208 args_so_far));
3209 target = addr;
3210 }
3211
3212 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3213
3214 if (current_function_check_memory_usage && ! in_check_memory_usage)
3215 {
3216 in_check_memory_usage = 1;
3217 if (target == 0)
3218 target = get_push_address (GET_MODE_SIZE (mode));
3219
3220 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3221 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3222 target, Pmode,
3223 XEXP (x, 0), Pmode,
3224 GEN_INT (GET_MODE_SIZE (mode)),
3225 TYPE_MODE (sizetype));
3226 else
3227 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3228 target, Pmode,
3229 GEN_INT (GET_MODE_SIZE (mode)),
3230 TYPE_MODE (sizetype),
3231 GEN_INT (MEMORY_USE_RW),
3232 TYPE_MODE (integer_type_node));
3233 in_check_memory_usage = 0;
3234 }
3235 }
3236
3237 ret:
3238 /* If part should go in registers, copy that part
3239 into the appropriate registers. Do this now, at the end,
3240 since mem-to-mem copies above may do function calls. */
3241 if (partial > 0 && reg != 0)
3242 {
3243 /* Handle calls that pass values in multiple non-contiguous locations.
3244 The Irix 6 ABI has examples of this. */
3245 if (GET_CODE (reg) == PARALLEL)
3246 emit_group_load (reg, x, -1, align); /* ??? size? */
3247 else
3248 move_block_to_reg (REGNO (reg), x, partial, mode);
3249 }
3250
3251 if (extra && args_addr == 0 && where_pad == stack_direction)
3252 anti_adjust_stack (GEN_INT (extra));
3253
3254 if (alignment_pad)
3255 anti_adjust_stack (alignment_pad);
3256 }
3257 \f
3258 /* Expand an assignment that stores the value of FROM into TO.
3259 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3260 (This may contain a QUEUED rtx;
3261 if the value is constant, this rtx is a constant.)
3262 Otherwise, the returned value is NULL_RTX.
3263
3264 SUGGEST_REG is no longer actually used.
3265 It used to mean, copy the value through a register
3266 and return that register, if that is possible.
3267 We now use WANT_VALUE to decide whether to do this. */
3268
3269 rtx
3270 expand_assignment (to, from, want_value, suggest_reg)
3271 tree to, from;
3272 int want_value;
3273 int suggest_reg ATTRIBUTE_UNUSED;
3274 {
3275 register rtx to_rtx = 0;
3276 rtx result;
3277
3278 /* Don't crash if the lhs of the assignment was erroneous. */
3279
3280 if (TREE_CODE (to) == ERROR_MARK)
3281 {
3282 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3283 return want_value ? result : NULL_RTX;
3284 }
3285
3286 /* Assignment of a structure component needs special treatment
3287 if the structure component's rtx is not simply a MEM.
3288 Assignment of an array element at a constant index, and assignment of
3289 an array element in an unaligned packed structure field, has the same
3290 problem. */
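  /* Typical examples are `s.f = x' where F is a bit-field, or
     `p.v[i] = x' where V is an array inside a packed, misaligned
     structure field.  */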
3291
3292 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3293 || TREE_CODE (to) == ARRAY_REF)
3294 {
3295 enum machine_mode mode1;
3296 int bitsize;
3297 int bitpos;
3298 tree offset;
3299 int unsignedp;
3300 int volatilep = 0;
3301 tree tem;
3302 int alignment;
3303
3304 push_temp_slots ();
3305 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3306 &unsignedp, &volatilep, &alignment);
3307
3308 /* If we are going to use store_bit_field and extract_bit_field,
3309 make sure to_rtx will be safe for multiple use. */
3310
3311 if (mode1 == VOIDmode && want_value)
3312 tem = stabilize_reference (tem);
3313
3314 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3315 if (offset != 0)
3316 {
3317 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3318
3319 if (GET_CODE (to_rtx) != MEM)
3320 abort ();
3321
3322 if (GET_MODE (offset_rtx) != ptr_mode)
3323 {
3324 #ifdef POINTERS_EXTEND_UNSIGNED
3325 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3326 #else
3327 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3328 #endif
3329 }
3330
3331 	  /* A constant address in TO_RTX can have VOIDmode; we must not try
3332 to call force_reg for that case. Avoid that case. */
3333 if (GET_CODE (to_rtx) == MEM
3334 && GET_MODE (to_rtx) == BLKmode
3335 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3336 && bitsize
3337 && (bitpos % bitsize) == 0
3338 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3339 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3340 {
3341 rtx temp = change_address (to_rtx, mode1,
3342 plus_constant (XEXP (to_rtx, 0),
3343 (bitpos /
3344 BITS_PER_UNIT)));
3345 if (GET_CODE (XEXP (temp, 0)) == REG)
3346 to_rtx = temp;
3347 else
3348 to_rtx = change_address (to_rtx, mode1,
3349 force_reg (GET_MODE (XEXP (temp, 0)),
3350 XEXP (temp, 0)));
3351 bitpos = 0;
3352 }
3353
3354 to_rtx = change_address (to_rtx, VOIDmode,
3355 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3356 force_reg (ptr_mode,
3357 offset_rtx)));
3358 }
3359
3360 if (volatilep)
3361 {
3362 if (GET_CODE (to_rtx) == MEM)
3363 {
3364 /* When the offset is zero, to_rtx is the address of the
3365 structure we are storing into, and hence may be shared.
3366 We must make a new MEM before setting the volatile bit. */
3367 if (offset == 0)
3368 to_rtx = copy_rtx (to_rtx);
3369
3370 MEM_VOLATILE_P (to_rtx) = 1;
3371 }
3372 #if 0 /* This was turned off because, when a field is volatile
3373 in an object which is not volatile, the object may be in a register,
3374 and then we would abort over here. */
3375 else
3376 abort ();
3377 #endif
3378 }
3379
3380 if (TREE_CODE (to) == COMPONENT_REF
3381 && TREE_READONLY (TREE_OPERAND (to, 1)))
3382 {
3383 if (offset == 0)
3384 to_rtx = copy_rtx (to_rtx);
3385
3386 RTX_UNCHANGING_P (to_rtx) = 1;
3387 }
3388
3389 /* Check the access. */
3390 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3391 {
3392 rtx to_addr;
3393 int size;
3394 int best_mode_size;
3395 enum machine_mode best_mode;
3396
3397 best_mode = get_best_mode (bitsize, bitpos,
3398 TYPE_ALIGN (TREE_TYPE (tem)),
3399 mode1, volatilep);
3400 if (best_mode == VOIDmode)
3401 best_mode = QImode;
3402
3403 best_mode_size = GET_MODE_BITSIZE (best_mode);
3404 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3405 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3406 size *= GET_MODE_SIZE (best_mode);
3407
3408 /* Check the access right of the pointer. */
3409 if (size)
3410 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3411 to_addr, Pmode,
3412 GEN_INT (size), TYPE_MODE (sizetype),
3413 GEN_INT (MEMORY_USE_WO),
3414 TYPE_MODE (integer_type_node));
3415 }
3416
3417 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3418 (want_value
3419 /* Spurious cast makes HPUX compiler happy. */
3420 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3421 : VOIDmode),
3422 unsignedp,
3423 /* Required alignment of containing datum. */
3424 alignment,
3425 int_size_in_bytes (TREE_TYPE (tem)),
3426 get_alias_set (to));
3427 preserve_temp_slots (result);
3428 free_temp_slots ();
3429 pop_temp_slots ();
3430
3431 /* If the value is meaningful, convert RESULT to the proper mode.
3432 Otherwise, return nothing. */
3433 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3434 TYPE_MODE (TREE_TYPE (from)),
3435 result,
3436 TREE_UNSIGNED (TREE_TYPE (to)))
3437 : NULL_RTX);
3438 }
3439
3440 /* If the rhs is a function call and its value is not an aggregate,
3441 call the function before we start to compute the lhs.
3442 This is needed for correct code for cases such as
3443 val = setjmp (buf) on machines where reference to val
3444 requires loading up part of an address in a separate insn.
3445
3446 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3447 a promoted variable where the zero- or sign- extension needs to be done.
3448 Handling this in the normal way is safe because no computation is done
3449 before the call. */
3450 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3451 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3452 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3453 {
3454 rtx value;
3455
3456 push_temp_slots ();
3457 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3458 if (to_rtx == 0)
3459 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3460
3461 /* Handle calls that return values in multiple non-contiguous locations.
3462 The Irix 6 ABI has examples of this. */
3463 if (GET_CODE (to_rtx) == PARALLEL)
3464 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3465 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3466 else if (GET_MODE (to_rtx) == BLKmode)
3467 emit_block_move (to_rtx, value, expr_size (from),
3468 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3469 else
3470 {
3471 #ifdef POINTERS_EXTEND_UNSIGNED
3472 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3473 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3474 value = convert_memory_address (GET_MODE (to_rtx), value);
3475 #endif
3476 emit_move_insn (to_rtx, value);
3477 }
3478 preserve_temp_slots (to_rtx);
3479 free_temp_slots ();
3480 pop_temp_slots ();
3481 return want_value ? to_rtx : NULL_RTX;
3482 }
3483
3484 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3485 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3486
3487 if (to_rtx == 0)
3488 {
3489 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3490 if (GET_CODE (to_rtx) == MEM)
3491 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3492 }
3493
3494 /* Don't move directly into a return register. */
3495 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3496 {
3497 rtx temp;
3498
3499 push_temp_slots ();
3500 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3501 emit_move_insn (to_rtx, temp);
3502 preserve_temp_slots (to_rtx);
3503 free_temp_slots ();
3504 pop_temp_slots ();
3505 return want_value ? to_rtx : NULL_RTX;
3506 }
3507
3508 /* In case we are returning the contents of an object which overlaps
3509 the place the value is being stored, use a safe function when copying
3510 a value through a pointer into a structure value return block. */
3511 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3512 && current_function_returns_struct
3513 && !current_function_returns_pcc_struct)
3514 {
3515 rtx from_rtx, size;
3516
3517 push_temp_slots ();
3518 size = expr_size (from);
3519 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3520 EXPAND_MEMORY_USE_DONT);
3521
3522 /* Copy the rights of the bitmap. */
3523 if (current_function_check_memory_usage)
3524 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3525 XEXP (to_rtx, 0), Pmode,
3526 XEXP (from_rtx, 0), Pmode,
3527 convert_to_mode (TYPE_MODE (sizetype),
3528 size, TREE_UNSIGNED (sizetype)),
3529 TYPE_MODE (sizetype));
3530
3531 #ifdef TARGET_MEM_FUNCTIONS
3532 emit_library_call (memcpy_libfunc, 0,
3533 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3534 XEXP (from_rtx, 0), Pmode,
3535 convert_to_mode (TYPE_MODE (sizetype),
3536 size, TREE_UNSIGNED (sizetype)),
3537 TYPE_MODE (sizetype));
3538 #else
3539 emit_library_call (bcopy_libfunc, 0,
3540 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3541 XEXP (to_rtx, 0), Pmode,
3542 convert_to_mode (TYPE_MODE (integer_type_node),
3543 size, TREE_UNSIGNED (integer_type_node)),
3544 TYPE_MODE (integer_type_node));
3545 #endif
3546
3547 preserve_temp_slots (to_rtx);
3548 free_temp_slots ();
3549 pop_temp_slots ();
3550 return want_value ? to_rtx : NULL_RTX;
3551 }
3552
3553 /* Compute FROM and store the value in the rtx we got. */
3554
3555 push_temp_slots ();
3556 result = store_expr (from, to_rtx, want_value);
3557 preserve_temp_slots (result);
3558 free_temp_slots ();
3559 pop_temp_slots ();
3560 return want_value ? result : NULL_RTX;
3561 }
3562
3563 /* Generate code for computing expression EXP,
3564 and storing the value into TARGET.
3565 TARGET may contain a QUEUED rtx.
3566
3567 If WANT_VALUE is nonzero, return a copy of the value
3568 not in TARGET, so that we can be sure to use the proper
3569 value in a containing expression even if TARGET has something
3570 else stored in it. If possible, we copy the value through a pseudo
3571 and return that pseudo. Or, if the value is constant, we try to
3572 return the constant. In some cases, we return a pseudo
3573 copied *from* TARGET.
3574
3575 If the mode is BLKmode then we may return TARGET itself.
3576    It turns out that in BLKmode it doesn't cause a problem,
3577 because C has no operators that could combine two different
3578 assignments into the same BLKmode object with different values
3579 with no sequence point. Will other languages need this to
3580 be more thorough?
3581
3582 If WANT_VALUE is 0, we return NULL, to make sure
3583 to catch quickly any cases where the caller uses the value
3584 and fails to set WANT_VALUE. */
3585
3586 rtx
3587 store_expr (exp, target, want_value)
3588 register tree exp;
3589 register rtx target;
3590 int want_value;
3591 {
3592 register rtx temp;
3593 int dont_return_target = 0;
3594
3595 if (TREE_CODE (exp) == COMPOUND_EXPR)
3596 {
3597 /* Perform first part of compound expression, then assign from second
3598 part. */
3599 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3600 emit_queue ();
3601 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3602 }
3603 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3604 {
3605 /* For conditional expression, get safe form of the target. Then
3606 test the condition, doing the appropriate assignment on either
3607 side. This avoids the creation of unnecessary temporaries.
3608 For non-BLKmode, it is more efficient not to do this. */
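      /* Roughly, for `target = cond ? a : b' this emits

	   if (!cond) goto lab1;  store A into TARGET;  goto lab2;
	   lab1:  store B into TARGET;  lab2:

	 instead of computing the chosen value into a temporary first.  */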
3609
3610 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3611
3612 emit_queue ();
3613 target = protect_from_queue (target, 1);
3614
3615 do_pending_stack_adjust ();
3616 NO_DEFER_POP;
3617 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3618 start_cleanup_deferral ();
3619 store_expr (TREE_OPERAND (exp, 1), target, 0);
3620 end_cleanup_deferral ();
3621 emit_queue ();
3622 emit_jump_insn (gen_jump (lab2));
3623 emit_barrier ();
3624 emit_label (lab1);
3625 start_cleanup_deferral ();
3626 store_expr (TREE_OPERAND (exp, 2), target, 0);
3627 end_cleanup_deferral ();
3628 emit_queue ();
3629 emit_label (lab2);
3630 OK_DEFER_POP;
3631
3632 return want_value ? target : NULL_RTX;
3633 }
3634 else if (queued_subexp_p (target))
3635 /* If target contains a postincrement, let's not risk
3636 using it as the place to generate the rhs. */
3637 {
3638 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3639 {
3640 /* Expand EXP into a new pseudo. */
3641 temp = gen_reg_rtx (GET_MODE (target));
3642 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3643 }
3644 else
3645 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3646
3647 /* If target is volatile, ANSI requires accessing the value
3648 *from* the target, if it is accessed. So make that happen.
3649 In no case return the target itself. */
3650 if (! MEM_VOLATILE_P (target) && want_value)
3651 dont_return_target = 1;
3652 }
3653 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3654 && GET_MODE (target) != BLKmode)
3655 /* If target is in memory and caller wants value in a register instead,
3656 arrange that. Pass TARGET as target for expand_expr so that,
3657 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3658 We know expand_expr will not use the target in that case.
3659 Don't do this if TARGET is volatile because we are supposed
3660 to write it and then read it. */
3661 {
3662 temp = expand_expr (exp, target, GET_MODE (target), 0);
3663 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3664 temp = copy_to_reg (temp);
3665 dont_return_target = 1;
3666 }
3667 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3668 /* If this is a scalar in a register that is stored in a wider mode
3669 than the declared mode, compute the result into its declared mode
3670 and then convert to the wider mode. Our value is the computed
3671 expression. */
3672 {
3673 /* If we don't want a value, we can do the conversion inside EXP,
3674 which will often result in some optimizations. Do the conversion
3675 in two steps: first change the signedness, if needed, then
3676 the extend. But don't do this if the type of EXP is a subtype
3677 of something else since then the conversion might involve
3678 more than just converting modes. */
3679 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3680 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3681 {
3682 if (TREE_UNSIGNED (TREE_TYPE (exp))
3683 != SUBREG_PROMOTED_UNSIGNED_P (target))
3684 exp
3685 = convert
3686 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3687 TREE_TYPE (exp)),
3688 exp);
3689
3690 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3691 SUBREG_PROMOTED_UNSIGNED_P (target)),
3692 exp);
3693 }
3694
3695 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3696
3697 /* If TEMP is a volatile MEM and we want a result value, make
3698 the access now so it gets done only once. Likewise if
3699 it contains TARGET. */
3700 if (GET_CODE (temp) == MEM && want_value
3701 && (MEM_VOLATILE_P (temp)
3702 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3703 temp = copy_to_reg (temp);
3704
3705 /* If TEMP is a VOIDmode constant, use convert_modes to make
3706 sure that we properly convert it. */
3707 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3708 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3709 TYPE_MODE (TREE_TYPE (exp)), temp,
3710 SUBREG_PROMOTED_UNSIGNED_P (target));
3711
3712 convert_move (SUBREG_REG (target), temp,
3713 SUBREG_PROMOTED_UNSIGNED_P (target));
3714
3715 /* If we promoted a constant, change the mode back down to match
3716 target. Otherwise, the caller might get confused by a result whose
3717 mode is larger than expected. */
3718
3719 if (want_value && GET_MODE (temp) != GET_MODE (target)
3720 && GET_MODE (temp) != VOIDmode)
3721 {
3722 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3723 SUBREG_PROMOTED_VAR_P (temp) = 1;
3724 SUBREG_PROMOTED_UNSIGNED_P (temp)
3725 = SUBREG_PROMOTED_UNSIGNED_P (target);
3726 }
3727
3728 return want_value ? temp : NULL_RTX;
3729 }
3730 else
3731 {
3732 temp = expand_expr (exp, target, GET_MODE (target), 0);
3733 /* Return TARGET if it's a specified hardware register.
3734 If TARGET is a volatile mem ref, either return TARGET
3735 or return a reg copied *from* TARGET; ANSI requires this.
3736
3737 Otherwise, if TEMP is not TARGET, return TEMP
3738 if it is constant (for efficiency),
3739 or if we really want the correct value. */
3740 if (!(target && GET_CODE (target) == REG
3741 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3742 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3743 && ! rtx_equal_p (temp, target)
3744 && (CONSTANT_P (temp) || want_value))
3745 dont_return_target = 1;
3746 }
3747
3748 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3749 the same as that of TARGET, adjust the constant. This is needed, for
3750 example, in case it is a CONST_DOUBLE and we want only a word-sized
3751 value. */
3752 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3753 && TREE_CODE (exp) != ERROR_MARK
3754 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3755 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3756 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3757
3758 if (current_function_check_memory_usage
3759 && GET_CODE (target) == MEM
3760 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3761 {
3762 if (GET_CODE (temp) == MEM)
3763 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3764 XEXP (target, 0), Pmode,
3765 XEXP (temp, 0), Pmode,
3766 expr_size (exp), TYPE_MODE (sizetype));
3767 else
3768 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3769 XEXP (target, 0), Pmode,
3770 expr_size (exp), TYPE_MODE (sizetype),
3771 GEN_INT (MEMORY_USE_WO),
3772 TYPE_MODE (integer_type_node));
3773 }
3774
3775 /* If value was not generated in the target, store it there.
3776 Convert the value to TARGET's type first if necessary. */
3777 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3778 one or both of them are volatile memory refs, we have to distinguish
3779 two cases:
3780 - expand_expr has used TARGET. In this case, we must not generate
3781 another copy. This can be detected by TARGET being equal according
3782 to == .
3783 - expand_expr has not used TARGET - that means that the source just
3784 happens to have the same RTX form. Since temp will have been created
3785 by expand_expr, it will compare unequal according to == .
3786 We must generate a copy in this case, to reach the correct number
3787 of volatile memory references. */
3788
3789 if ((! rtx_equal_p (temp, target)
3790 || (temp != target && (side_effects_p (temp)
3791 || side_effects_p (target))))
3792 && TREE_CODE (exp) != ERROR_MARK)
3793 {
3794 target = protect_from_queue (target, 1);
3795 if (GET_MODE (temp) != GET_MODE (target)
3796 && GET_MODE (temp) != VOIDmode)
3797 {
3798 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3799 if (dont_return_target)
3800 {
3801 /* In this case, we will return TEMP,
3802 so make sure it has the proper mode.
3803 But don't forget to store the value into TARGET. */
3804 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3805 emit_move_insn (target, temp);
3806 }
3807 else
3808 convert_move (target, temp, unsignedp);
3809 }
3810
3811 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3812 {
3813 /* Handle copying a string constant into an array.
3814 The string constant may be shorter than the array.
3815 So copy just the string's actual length, and clear the rest. */
3816 rtx size;
3817 rtx addr;
3818
3819 /* Get the size of the data type of the string,
3820 which is actually the size of the target. */
3821 size = expr_size (exp);
3822 if (GET_CODE (size) == CONST_INT
3823 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3824 emit_block_move (target, temp, size,
3825 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3826 else
3827 {
3828 /* Compute the size of the data to copy from the string. */
3829 tree copy_size
3830 = size_binop (MIN_EXPR,
3831 make_tree (sizetype, size),
3832 convert (sizetype,
3833 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3834 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3835 VOIDmode, 0);
3836 rtx label = 0;
3837
3838 /* Copy that much. */
3839 emit_block_move (target, temp, copy_size_rtx,
3840 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3841
3842 /* Figure out how much is left in TARGET that we have to clear.
3843 Do all calculations in ptr_mode. */
3844
3845 addr = XEXP (target, 0);
3846 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3847
3848 if (GET_CODE (copy_size_rtx) == CONST_INT)
3849 {
3850 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3851 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3852 }
3853 else
3854 {
3855 addr = force_reg (ptr_mode, addr);
3856 addr = expand_binop (ptr_mode, add_optab, addr,
3857 copy_size_rtx, NULL_RTX, 0,
3858 OPTAB_LIB_WIDEN);
3859
3860 size = expand_binop (ptr_mode, sub_optab, size,
3861 copy_size_rtx, NULL_RTX, 0,
3862 OPTAB_LIB_WIDEN);
3863
3864 label = gen_label_rtx ();
3865 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3866 GET_MODE (size), 0, 0, label);
3867 }
3868
3869 if (size != const0_rtx)
3870 {
3871 /* Be sure we can write on ADDR. */
3872 if (current_function_check_memory_usage)
3873 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3874 addr, Pmode,
3875 size, TYPE_MODE (sizetype),
3876 GEN_INT (MEMORY_USE_WO),
3877 TYPE_MODE (integer_type_node));
3878 #ifdef TARGET_MEM_FUNCTIONS
3879 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3880 addr, ptr_mode,
3881 const0_rtx, TYPE_MODE (integer_type_node),
3882 convert_to_mode (TYPE_MODE (sizetype),
3883 size,
3884 TREE_UNSIGNED (sizetype)),
3885 TYPE_MODE (sizetype));
3886 #else
3887 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3888 addr, ptr_mode,
3889 convert_to_mode (TYPE_MODE (integer_type_node),
3890 size,
3891 TREE_UNSIGNED (integer_type_node)),
3892 TYPE_MODE (integer_type_node));
3893 #endif
3894 }
3895
3896 if (label)
3897 emit_label (label);
3898 }
3899 }
3900 /* Handle calls that return values in multiple non-contiguous locations.
3901 The Irix 6 ABI has examples of this. */
3902 else if (GET_CODE (target) == PARALLEL)
3903 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3904 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3905 else if (GET_MODE (temp) == BLKmode)
3906 emit_block_move (target, temp, expr_size (exp),
3907 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3908 else
3909 emit_move_insn (target, temp);
3910 }
3911
3912 /* If we don't want a value, return NULL_RTX. */
3913 if (! want_value)
3914 return NULL_RTX;
3915
3916 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3917 ??? The latter test doesn't seem to make sense. */
3918 else if (dont_return_target && GET_CODE (temp) != MEM)
3919 return temp;
3920
3921 /* Return TARGET itself if it is a hard register. */
3922 else if (want_value && GET_MODE (target) != BLKmode
3923 && ! (GET_CODE (target) == REG
3924 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3925 return copy_to_reg (target);
3926
3927 else
3928 return target;
3929 }
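/* Illustrative sketch (not part of the compiler): a caller that needs the
   stored value for a containing expression follows the WANT_VALUE protocol
   documented above.  The helper name below is hypothetical.  */
#if 0
static rtx
store_expr_usage_example (from, to_rtx)
     tree from;
     rtx to_rtx;
{
  /* Passing 1 asks for a usable copy of the value (often a pseudo or a
     constant) rather than TO_RTX itself, so the result remains correct
     even if TO_RTX is volatile or is overwritten later.  */
  return store_expr (from, to_rtx, 1);
}
#endif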
3930 \f
3931 /* Return 1 if EXP just contains zeros. */
3932
3933 static int
3934 is_zeros_p (exp)
3935 tree exp;
3936 {
3937 tree elt;
3938
3939 switch (TREE_CODE (exp))
3940 {
3941 case CONVERT_EXPR:
3942 case NOP_EXPR:
3943 case NON_LVALUE_EXPR:
3944 return is_zeros_p (TREE_OPERAND (exp, 0));
3945
3946 case INTEGER_CST:
3947 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3948
3949 case COMPLEX_CST:
3950 return
3951 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3952
3953 case REAL_CST:
3954 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3955
3956 case CONSTRUCTOR:
3957 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3958 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3959 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3960 if (! is_zeros_p (TREE_VALUE (elt)))
3961 return 0;
3962
3963 return 1;
3964
3965 default:
3966 return 0;
3967 }
3968 }
3969
3970 /* Return 1 if EXP contains mostly (3/4) zeros. */
3971
3972 static int
3973 mostly_zeros_p (exp)
3974 tree exp;
3975 {
3976 if (TREE_CODE (exp) == CONSTRUCTOR)
3977 {
3978 int elts = 0, zeros = 0;
3979 tree elt = CONSTRUCTOR_ELTS (exp);
3980 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3981 {
3982 /* If there are no ranges of true bits, it is all zero. */
3983 return elt == NULL_TREE;
3984 }
3985 for (; elt; elt = TREE_CHAIN (elt))
3986 {
3987 /* We do not handle the case where the index is a RANGE_EXPR,
3988 so the statistic will be somewhat inaccurate.
3989 We do make a more accurate count in store_constructor itself,
3990 and since this function is only used for nested array elements,
3991 this should be close enough. */
3992 if (mostly_zeros_p (TREE_VALUE (elt)))
3993 zeros++;
3994 elts++;
3995 }
3996
3997 return 4 * zeros >= 3 * elts;
3998 }
3999
4000 return is_zeros_p (exp);
4001 }
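/* Illustrative sketch (not part of the compiler): the test above is the
   integer form of "at least three quarters of the elements are zero",
   avoiding any division.  For example, 7 zeros out of 9 elements gives
   4*7 = 28 >= 3*9 = 27, so such a constructor counts as mostly zero.  */
#if 0
static int
mostly_zeros_threshold_example (zeros, elts)
     int zeros, elts;
{
  return 4 * zeros >= 3 * elts;
}
#endif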
4002 \f
4003 /* Helper function for store_constructor.
4004 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4005 TYPE is the type of the CONSTRUCTOR, not the element type.
4006 ALIGN and CLEARED are as for store_constructor.
4007
4008 This provides a recursive shortcut back to store_constructor when it isn't
4009 necessary to go through store_field. This is so that we can pass through
4010 the cleared field to let store_constructor know that we may not have to
4011 clear a substructure if the outer structure has already been cleared. */
4012
4013 static void
4014 store_constructor_field (target, bitsize, bitpos,
4015 mode, exp, type, align, cleared)
4016 rtx target;
4017 int bitsize, bitpos;
4018 enum machine_mode mode;
4019 tree exp, type;
4020 int align;
4021 int cleared;
4022 {
4023 if (TREE_CODE (exp) == CONSTRUCTOR
4024 && bitpos % BITS_PER_UNIT == 0
4025 /* If we have a non-zero bitpos for a register target, then we just
4026 let store_field do the bitfield handling. This is unlikely to
4027 generate unnecessary clear instructions anyway. */
4028 && (bitpos == 0 || GET_CODE (target) == MEM))
4029 {
4030 if (bitpos != 0)
4031 target = change_address (target, VOIDmode,
4032 plus_constant (XEXP (target, 0),
4033 bitpos / BITS_PER_UNIT));
4034 store_constructor (exp, target, align, cleared);
4035 }
4036 else
4037 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4038 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4039 int_size_in_bytes (type), cleared);
4040 }
4041
4042 /* Store the value of constructor EXP into the rtx TARGET.
4043 TARGET is either a REG or a MEM.
4044 ALIGN is the maximum known alignment for TARGET, in bits.
4045 CLEARED is true if TARGET is known to have been zero'd. */
4046
4047 static void
4048 store_constructor (exp, target, align, cleared)
4049 tree exp;
4050 rtx target;
4051 int align;
4052 int cleared;
4053 {
4054 tree type = TREE_TYPE (exp);
4055 #ifdef WORD_REGISTER_OPERATIONS
4056 rtx exp_size = expr_size (exp);
4057 #endif
4058
4059 /* We know our target cannot conflict, since safe_from_p has been called. */
4060 #if 0
4061 /* Don't try copying piece by piece into a hard register
4062 since that is vulnerable to being clobbered by EXP.
4063 Instead, construct in a pseudo register and then copy it all. */
4064 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4065 {
4066 rtx temp = gen_reg_rtx (GET_MODE (target));
4067 store_constructor (exp, temp, 0);
4068 emit_move_insn (target, temp);
4069 return;
4070 }
4071 #endif
4072
4073 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4074 || TREE_CODE (type) == QUAL_UNION_TYPE)
4075 {
4076 register tree elt;
4077
4078 /* Inform later passes that the whole union value is dead. */
4079 if (TREE_CODE (type) == UNION_TYPE
4080 || TREE_CODE (type) == QUAL_UNION_TYPE)
4081 {
4082 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4083
4084 /* If the constructor is empty, clear the union. */
4085 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4086 clear_storage (target, expr_size (exp),
4087 TYPE_ALIGN (type) / BITS_PER_UNIT);
4088 }
4089
4090 /* If we are building a static constructor into a register,
4091 set the initial value as zero so we can fold the value into
4092 a constant. But if more than one register is involved,
4093 this probably loses. */
4094 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4095 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4096 {
4097 if (! cleared)
4098 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4099
4100 cleared = 1;
4101 }
4102
4103 /* If the constructor has fewer fields than the structure
4104 or if we are initializing the structure to mostly zeros,
4105 clear the whole structure first. */
4106 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4107 != list_length (TYPE_FIELDS (type)))
4108 || mostly_zeros_p (exp))
4109 {
4110 if (! cleared)
4111 clear_storage (target, expr_size (exp),
4112 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4113
4114 cleared = 1;
4115 }
4116 else
4117 /* Inform later passes that the old value is dead. */
4118 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4119
4120 /* Store each element of the constructor into
4121 the corresponding field of TARGET. */
4122
4123 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4124 {
4125 register tree field = TREE_PURPOSE (elt);
4126 #ifdef WORD_REGISTER_OPERATIONS
4127 tree value = TREE_VALUE (elt);
4128 #endif
4129 register enum machine_mode mode;
4130 int bitsize;
4131 int bitpos = 0;
4132 int unsignedp;
4133 tree pos, constant = 0, offset = 0;
4134 rtx to_rtx = target;
4135
4136 /* Just ignore missing fields.
4137 We cleared the whole structure, above,
4138 if any fields are missing. */
4139 if (field == 0)
4140 continue;
4141
4142 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4143 continue;
4144
4145 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4146 unsignedp = TREE_UNSIGNED (field);
4147 mode = DECL_MODE (field);
4148 if (DECL_BIT_FIELD (field))
4149 mode = VOIDmode;
4150
4151 pos = DECL_FIELD_BITPOS (field);
4152 if (TREE_CODE (pos) == INTEGER_CST)
4153 constant = pos;
4154 else if (TREE_CODE (pos) == PLUS_EXPR
4155 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4156 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4157 else
4158 offset = pos;
4159
4160 if (constant)
4161 bitpos = TREE_INT_CST_LOW (constant);
4162
4163 if (offset)
4164 {
4165 rtx offset_rtx;
4166
4167 if (contains_placeholder_p (offset))
4168 offset = build (WITH_RECORD_EXPR, sizetype,
4169 offset, make_tree (TREE_TYPE (exp), target));
4170
4171 offset = size_binop (FLOOR_DIV_EXPR, offset,
4172 size_int (BITS_PER_UNIT));
4173
4174 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4175 if (GET_CODE (to_rtx) != MEM)
4176 abort ();
4177
4178 if (GET_MODE (offset_rtx) != ptr_mode)
4179 {
4180 #ifdef POINTERS_EXTEND_UNSIGNED
4181 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4182 #else
4183 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4184 #endif
4185 }
4186
4187 to_rtx
4188 = change_address (to_rtx, VOIDmode,
4189 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4190 force_reg (ptr_mode,
4191 offset_rtx)));
4192 }
4193
4194 if (TREE_READONLY (field))
4195 {
4196 if (GET_CODE (to_rtx) == MEM)
4197 to_rtx = copy_rtx (to_rtx);
4198
4199 RTX_UNCHANGING_P (to_rtx) = 1;
4200 }
4201
4202 #ifdef WORD_REGISTER_OPERATIONS
4203 /* If this initializes a field that is smaller than a word, at the
4204 start of a word, try to widen it to a full word.
4205 This special case allows us to output C++ member function
4206 initializations in a form that the optimizers can understand. */
4207 if (constant
4208 && GET_CODE (target) == REG
4209 && bitsize < BITS_PER_WORD
4210 && bitpos % BITS_PER_WORD == 0
4211 && GET_MODE_CLASS (mode) == MODE_INT
4212 && TREE_CODE (value) == INTEGER_CST
4213 && GET_CODE (exp_size) == CONST_INT
4214 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4215 {
4216 tree type = TREE_TYPE (value);
4217 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4218 {
4219 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4220 value = convert (type, value);
4221 }
4222 if (BYTES_BIG_ENDIAN)
4223 value
4224 = fold (build (LSHIFT_EXPR, type, value,
4225 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4226 bitsize = BITS_PER_WORD;
4227 mode = word_mode;
4228 }
4229 #endif
4230 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4231 TREE_VALUE (elt), type,
4232 MIN (align,
4233 DECL_ALIGN (TREE_PURPOSE (elt))),
4234 cleared);
4235 }
4236 }
4237 else if (TREE_CODE (type) == ARRAY_TYPE)
4238 {
4239 register tree elt;
4240 register int i;
4241 int need_to_clear;
4242 tree domain = TYPE_DOMAIN (type);
4243 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4244 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4245 tree elttype = TREE_TYPE (type);
4246
4247 /* If the constructor has fewer elements than the array,
4248 clear the whole array first. Similarly if this is
4249 a static constructor of a non-BLKmode object. */
4250 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4251 need_to_clear = 1;
4252 else
4253 {
4254 HOST_WIDE_INT count = 0, zero_count = 0;
4255 need_to_clear = 0;
4256 /* This loop is a more accurate version of the loop in
4257 mostly_zeros_p (it handles RANGE_EXPR in an index).
4258 It is also needed to check for missing elements. */
4259 for (elt = CONSTRUCTOR_ELTS (exp);
4260 elt != NULL_TREE;
4261 elt = TREE_CHAIN (elt))
4262 {
4263 tree index = TREE_PURPOSE (elt);
4264 HOST_WIDE_INT this_node_count;
4265 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4266 {
4267 tree lo_index = TREE_OPERAND (index, 0);
4268 tree hi_index = TREE_OPERAND (index, 1);
4269 if (TREE_CODE (lo_index) != INTEGER_CST
4270 || TREE_CODE (hi_index) != INTEGER_CST)
4271 {
4272 need_to_clear = 1;
4273 break;
4274 }
4275 this_node_count = TREE_INT_CST_LOW (hi_index)
4276 - TREE_INT_CST_LOW (lo_index) + 1;
4277 }
4278 else
4279 this_node_count = 1;
4280 count += this_node_count;
4281 if (mostly_zeros_p (TREE_VALUE (elt)))
4282 zero_count += this_node_count;
4283 }
4284 /* Clear the entire array first if there are any missing elements,
4285 or if the incidence of zero elements is >= 75%. */
4286 if (count < maxelt - minelt + 1
4287 || 4 * zero_count >= 3 * count)
4288 need_to_clear = 1;
4289 }
4290 if (need_to_clear)
4291 {
4292 if (! cleared)
4293 clear_storage (target, expr_size (exp),
4294 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4295 cleared = 1;
4296 }
4297 else
4298 /* Inform later passes that the old value is dead. */
4299 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4300
4301 /* Store each element of the constructor into
4302 the corresponding element of TARGET, determined
4303 by counting the elements. */
4304 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4305 elt;
4306 elt = TREE_CHAIN (elt), i++)
4307 {
4308 register enum machine_mode mode;
4309 int bitsize;
4310 int bitpos;
4311 int unsignedp;
4312 tree value = TREE_VALUE (elt);
4313 int align = TYPE_ALIGN (TREE_TYPE (value));
4314 tree index = TREE_PURPOSE (elt);
4315 rtx xtarget = target;
4316
4317 if (cleared && is_zeros_p (value))
4318 continue;
4319
4320 mode = TYPE_MODE (elttype);
4321 bitsize = GET_MODE_BITSIZE (mode);
4322 unsignedp = TREE_UNSIGNED (elttype);
4323
4324 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4325 {
4326 tree lo_index = TREE_OPERAND (index, 0);
4327 tree hi_index = TREE_OPERAND (index, 1);
4328 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4329 struct nesting *loop;
4330 HOST_WIDE_INT lo, hi, count;
4331 tree position;
4332
4333 /* If the range is constant and "small", unroll the loop. */
4334 if (TREE_CODE (lo_index) == INTEGER_CST
4335 && TREE_CODE (hi_index) == INTEGER_CST
4336 && (lo = TREE_INT_CST_LOW (lo_index),
4337 hi = TREE_INT_CST_LOW (hi_index),
4338 count = hi - lo + 1,
4339 (GET_CODE (target) != MEM
4340 || count <= 2
4341 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4342 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4343 <= 40 * 8))))
4344 {
4345 lo -= minelt; hi -= minelt;
4346 for (; lo <= hi; lo++)
4347 {
4348 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4349 store_constructor_field (target, bitsize, bitpos, mode,
4350 value, type, align, cleared);
4351 }
4352 }
4353 else
4354 {
4355 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4356 loop_top = gen_label_rtx ();
4357 loop_end = gen_label_rtx ();
4358
4359 unsignedp = TREE_UNSIGNED (domain);
4360
4361 index = build_decl (VAR_DECL, NULL_TREE, domain);
4362
4363 DECL_RTL (index) = index_r
4364 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4365 &unsignedp, 0));
4366
4367 if (TREE_CODE (value) == SAVE_EXPR
4368 && SAVE_EXPR_RTL (value) == 0)
4369 {
4370 /* Make sure value gets expanded once before the
4371 loop. */
4372 expand_expr (value, const0_rtx, VOIDmode, 0);
4373 emit_queue ();
4374 }
4375 store_expr (lo_index, index_r, 0);
4376 loop = expand_start_loop (0);
4377
4378 /* Assign value to element index. */
4379 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4380 size_int (BITS_PER_UNIT));
4381 position = size_binop (MULT_EXPR,
4382 size_binop (MINUS_EXPR, index,
4383 TYPE_MIN_VALUE (domain)),
4384 position);
4385 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4386 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4387 xtarget = change_address (target, mode, addr);
4388 if (TREE_CODE (value) == CONSTRUCTOR)
4389 store_constructor (value, xtarget, align, cleared);
4390 else
4391 store_expr (value, xtarget, 0);
4392
4393 expand_exit_loop_if_false (loop,
4394 build (LT_EXPR, integer_type_node,
4395 index, hi_index));
4396
4397 expand_increment (build (PREINCREMENT_EXPR,
4398 TREE_TYPE (index),
4399 index, integer_one_node), 0, 0);
4400 expand_end_loop ();
4401 emit_label (loop_end);
4402
4403 /* Needed by stupid register allocation, to extend the
4404 lifetime of pseudo-regs used by target past the end
4405 of the loop. */
4406 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4407 }
4408 }
4409 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4410 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4411 {
4412 rtx pos_rtx, addr;
4413 tree position;
4414
4415 if (index == 0)
4416 index = size_int (i);
4417
4418 if (minelt)
4419 index = size_binop (MINUS_EXPR, index,
4420 TYPE_MIN_VALUE (domain));
4421 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4422 size_int (BITS_PER_UNIT));
4423 position = size_binop (MULT_EXPR, index, position);
4424 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4425 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4426 xtarget = change_address (target, mode, addr);
4427 store_expr (value, xtarget, 0);
4428 }
4429 else
4430 {
4431 if (index != 0)
4432 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4433 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4434 else
4435 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4436 store_constructor_field (target, bitsize, bitpos, mode, value,
4437 type, align, cleared);
4438 }
4439 }
4440 }
4441 /* Set constructor assignments. */
4442 else if (TREE_CODE (type) == SET_TYPE)
4443 {
4444 tree elt = CONSTRUCTOR_ELTS (exp);
4445 int nbytes = int_size_in_bytes (type), nbits;
4446 tree domain = TYPE_DOMAIN (type);
4447 tree domain_min, domain_max, bitlength;
4448
4449 /* The default implementation strategy is to extract the constant
4450 parts of the constructor, use that to initialize the target,
4451 and then "or" in whatever non-constant ranges we need in addition.
4452
4453 If a large set is all zero or all ones, it is
4454 probably better to set it using memset (if available) or bzero.
4455 Also, if a large set has just a single range, it may also be
4456 better to first clear the whole set (using
4457 bzero/memset) and then set the bits we want. */
4458
4459 /* Check for all zeros. */
4460 if (elt == NULL_TREE)
4461 {
4462 if (!cleared)
4463 clear_storage (target, expr_size (exp),
4464 TYPE_ALIGN (type) / BITS_PER_UNIT);
4465 return;
4466 }
4467
4468 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4469 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4470 bitlength = size_binop (PLUS_EXPR,
4471 size_binop (MINUS_EXPR, domain_max, domain_min),
4472 size_one_node);
4473
4474 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4475 abort ();
4476 nbits = TREE_INT_CST_LOW (bitlength);
4477
4478 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4479 are "complicated" (more than one range), initialize (the
4480 constant parts) by copying from a constant. */
4481 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4482 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4483 {
4484 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4485 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4486 char *bit_buffer = (char *) alloca (nbits);
4487 HOST_WIDE_INT word = 0;
4488 int bit_pos = 0;
4489 int ibit = 0;
4490 int offset = 0; /* In bytes from beginning of set. */
4491 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4492 for (;;)
4493 {
4494 if (bit_buffer[ibit])
4495 {
4496 if (BYTES_BIG_ENDIAN)
4497 word |= (1 << (set_word_size - 1 - bit_pos));
4498 else
4499 word |= 1 << bit_pos;
4500 }
4501 bit_pos++; ibit++;
4502 if (bit_pos >= set_word_size || ibit == nbits)
4503 {
4504 if (word != 0 || ! cleared)
4505 {
4506 rtx datum = GEN_INT (word);
4507 rtx to_rtx;
4508 /* The assumption here is that it is safe to use
4509 XEXP if the set is multi-word, but not if
4510 it's single-word. */
4511 if (GET_CODE (target) == MEM)
4512 {
4513 to_rtx = plus_constant (XEXP (target, 0), offset);
4514 to_rtx = change_address (target, mode, to_rtx);
4515 }
4516 else if (offset == 0)
4517 to_rtx = target;
4518 else
4519 abort ();
4520 emit_move_insn (to_rtx, datum);
4521 }
4522 if (ibit == nbits)
4523 break;
4524 word = 0;
4525 bit_pos = 0;
4526 offset += set_word_size / BITS_PER_UNIT;
4527 }
4528 }
4529 }
4530 else if (!cleared)
4531 {
4532 /* Don't bother clearing storage if the set is all ones. */
4533 if (TREE_CHAIN (elt) != NULL_TREE
4534 || (TREE_PURPOSE (elt) == NULL_TREE
4535 ? nbits != 1
4536 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4537 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4538 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4539 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4540 != nbits))))
4541 clear_storage (target, expr_size (exp),
4542 TYPE_ALIGN (type) / BITS_PER_UNIT);
4543 }
4544
4545 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4546 {
4547 /* Start of range of element, or NULL. */
4548 tree startbit = TREE_PURPOSE (elt);
4549 /* End of range of element, or element value. */
4550 tree endbit = TREE_VALUE (elt);
4551 #ifdef TARGET_MEM_FUNCTIONS
4552 HOST_WIDE_INT startb, endb;
4553 #endif
4554 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4555
4556 bitlength_rtx = expand_expr (bitlength,
4557 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4558
4559 /* Handle a non-range tuple element like [ expr ]. */
4560 if (startbit == NULL_TREE)
4561 {
4562 startbit = save_expr (endbit);
4563 endbit = startbit;
4564 }
4565 startbit = convert (sizetype, startbit);
4566 endbit = convert (sizetype, endbit);
4567 if (! integer_zerop (domain_min))
4568 {
4569 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4570 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4571 }
4572 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4573 EXPAND_CONST_ADDRESS);
4574 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4575 EXPAND_CONST_ADDRESS);
4576
4577 if (REG_P (target))
4578 {
4579 targetx = assign_stack_temp (GET_MODE (target),
4580 GET_MODE_SIZE (GET_MODE (target)),
4581 0);
4582 emit_move_insn (targetx, target);
4583 }
4584 else if (GET_CODE (target) == MEM)
4585 targetx = target;
4586 else
4587 abort ();
4588
4589 #ifdef TARGET_MEM_FUNCTIONS
4590 /* Optimization: If startbit and endbit are
4591 constants divisible by BITS_PER_UNIT,
4592 call memset instead. */
4593 if (TREE_CODE (startbit) == INTEGER_CST
4594 && TREE_CODE (endbit) == INTEGER_CST
4595 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4596 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4597 {
4598 emit_library_call (memset_libfunc, 0,
4599 VOIDmode, 3,
4600 plus_constant (XEXP (targetx, 0),
4601 startb / BITS_PER_UNIT),
4602 Pmode,
4603 constm1_rtx, TYPE_MODE (integer_type_node),
4604 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4605 TYPE_MODE (sizetype));
4606 }
4607 else
4608 #endif
4609 {
4610 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4611 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4612 bitlength_rtx, TYPE_MODE (sizetype),
4613 startbit_rtx, TYPE_MODE (sizetype),
4614 endbit_rtx, TYPE_MODE (sizetype));
4615 }
4616 if (REG_P (target))
4617 emit_move_insn (target, targetx);
4618 }
4619 }
4620
4621 else
4622 abort ();
4623 }
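/* Illustrative sketch (not part of the compiler): the constant part of a
   SET_TYPE constructor is packed into words exactly as in the loop above:
   set bit BIT_POS lands at the low-order end of the word on a
   little-endian target and at the high-order end on a big-endian one.
   The helper below is hypothetical and assumes BIT_BUFFER came from
   get_set_constructor_bits.  */
#if 0
static HOST_WIDE_INT
pack_set_word_example (bit_buffer, nbits, start, set_word_size, big_endian)
     char *bit_buffer;
     int nbits, start, set_word_size, big_endian;
{
  HOST_WIDE_INT word = 0;
  int bit_pos;

  for (bit_pos = 0; bit_pos < set_word_size && start + bit_pos < nbits;
       bit_pos++)
    if (bit_buffer[start + bit_pos])
      word |= (HOST_WIDE_INT) 1 << (big_endian
                                    ? set_word_size - 1 - bit_pos
                                    : bit_pos);
  return word;
}
#endif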
4624
4625 /* Store the value of EXP (an expression tree)
4626 into a subfield of TARGET which has mode MODE and occupies
4627 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4628 If MODE is VOIDmode, it means that we are storing into a bit-field.
4629
4630 If VALUE_MODE is VOIDmode, return nothing in particular.
4631 UNSIGNEDP is not used in this case.
4632
4633 Otherwise, return an rtx for the value stored. This rtx
4634 has mode VALUE_MODE if that is convenient to do.
4635 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4636
4637 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4638 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4639
4640 ALIAS_SET is the alias set for the destination. This value will
4641 (in general) be different from that for TARGET, since TARGET is a
4642 reference to the containing structure. */
4643
4644 static rtx
4645 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4646 unsignedp, align, total_size, alias_set)
4647 rtx target;
4648 int bitsize, bitpos;
4649 enum machine_mode mode;
4650 tree exp;
4651 enum machine_mode value_mode;
4652 int unsignedp;
4653 int align;
4654 int total_size;
4655 int alias_set;
4656 {
4657 HOST_WIDE_INT width_mask = 0;
4658
4659 if (TREE_CODE (exp) == ERROR_MARK)
4660 return const0_rtx;
4661
4662 if (bitsize < HOST_BITS_PER_WIDE_INT)
4663 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4664
4665 /* If we are storing into an unaligned field of an aligned union that is
4666 in a register, we may have the mode of TARGET being an integer mode but
4667 MODE == BLKmode. In that case, get an aligned object whose size and
4668 alignment are the same as TARGET and store TARGET into it (we can avoid
4669 the store if the field being stored is the entire width of TARGET). Then
4670 call ourselves recursively to store the field into a BLKmode version of
4671 that object. Finally, load from the object into TARGET. This is not
4672 very efficient in general, but should only be slightly more expensive
4673 than the otherwise-required unaligned accesses. Perhaps this can be
4674 cleaned up later. */
4675
4676 if (mode == BLKmode
4677 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4678 {
4679 rtx object = assign_stack_temp (GET_MODE (target),
4680 GET_MODE_SIZE (GET_MODE (target)), 0);
4681 rtx blk_object = copy_rtx (object);
4682
4683 MEM_SET_IN_STRUCT_P (object, 1);
4684 MEM_SET_IN_STRUCT_P (blk_object, 1);
4685 PUT_MODE (blk_object, BLKmode);
4686
4687 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4688 emit_move_insn (object, target);
4689
4690 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4691 align, total_size, alias_set);
4692
4693 /* Even though we aren't returning target, we need to
4694 give it the updated value. */
4695 emit_move_insn (target, object);
4696
4697 return blk_object;
4698 }
4699
4700 /* If the structure is in a register or if the component
4701 is a bit field, we cannot use addressing to access it.
4702 Use bit-field techniques or SUBREG to store in it. */
4703
4704 if (mode == VOIDmode
4705 || (mode != BLKmode && ! direct_store[(int) mode]
4706 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4707 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4708 || GET_CODE (target) == REG
4709 || GET_CODE (target) == SUBREG
4710 /* If the field isn't aligned enough to store as an ordinary memref,
4711 store it as a bit field. */
4712 || (SLOW_UNALIGNED_ACCESS
4713 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4714 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4715 {
4716 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4717
4718 /* If BITSIZE is narrower than the size of the type of EXP
4719 we will be narrowing TEMP. Normally, what's wanted are the
4720 low-order bits. However, if EXP's type is a record and this is
4721 a big-endian machine, we want the upper BITSIZE bits. */
4722 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4723 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4724 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4725 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4726 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4727 - bitsize),
4728 temp, 1);
4729
4730 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4731 MODE. */
4732 if (mode != VOIDmode && mode != BLKmode
4733 && mode != TYPE_MODE (TREE_TYPE (exp)))
4734 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4735
4736 /* If the modes of TARGET and TEMP are both BLKmode, both
4737 must be in memory and BITPOS must be aligned on a byte
4738 boundary. If so, we simply do a block copy. */
4739 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4740 {
4741 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4742 || bitpos % BITS_PER_UNIT != 0)
4743 abort ();
4744
4745 target = change_address (target, VOIDmode,
4746 plus_constant (XEXP (target, 0),
4747 bitpos / BITS_PER_UNIT));
4748
4749 emit_block_move (target, temp,
4750 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4751 / BITS_PER_UNIT),
4752 1);
4753
4754 return value_mode == VOIDmode ? const0_rtx : target;
4755 }
4756
4757 /* Store the value in the bitfield. */
4758 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4759 if (value_mode != VOIDmode)
4760 {
4761 /* The caller wants an rtx for the value. */
4762 /* If possible, avoid refetching from the bitfield itself. */
4763 if (width_mask != 0
4764 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4765 {
4766 tree count;
4767 enum machine_mode tmode;
4768
4769 if (unsignedp)
4770 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4771 tmode = GET_MODE (temp);
4772 if (tmode == VOIDmode)
4773 tmode = value_mode;
4774 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4775 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4776 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4777 }
4778 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4779 NULL_RTX, value_mode, 0, align,
4780 total_size);
4781 }
4782 return const0_rtx;
4783 }
4784 else
4785 {
4786 rtx addr = XEXP (target, 0);
4787 rtx to_rtx;
4788
4789 /* If a value is wanted, it must be the lhs;
4790 so make the address stable for multiple use. */
4791
4792 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4793 && ! CONSTANT_ADDRESS_P (addr)
4794 /* A frame-pointer reference is already stable. */
4795 && ! (GET_CODE (addr) == PLUS
4796 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4797 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4798 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4799 addr = copy_to_reg (addr);
4800
4801 /* Now build a reference to just the desired component. */
4802
4803 to_rtx = copy_rtx (change_address (target, mode,
4804 plus_constant (addr,
4805 (bitpos
4806 / BITS_PER_UNIT))));
4807 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4808 MEM_ALIAS_SET (to_rtx) = alias_set;
4809
4810 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4811 }
4812 }
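/* Illustrative sketch (not part of the compiler): when store_field hands
   back the value it just stored without re-reading the bit-field, it
   either masks with WIDTH_MASK (unsigned) or sign-extends by a left/right
   shift pair (signed), as in the code above.  The hypothetical helper
   below shows the same arithmetic on a plain host integer, assuming
   0 < BITSIZE < HOST_BITS_PER_WIDE_INT and an arithmetic right shift.  */
#if 0
static HOST_WIDE_INT
narrow_stored_value_example (value, bitsize, unsignedp)
     HOST_WIDE_INT value;
     int bitsize, unsignedp;
{
  if (unsignedp)
    return value & (((HOST_WIDE_INT) 1 << bitsize) - 1);

  /* Shift the field up to the top of the word and back down so that its
     sign bit is replicated through the upper bits.  */
  return ((value << (HOST_BITS_PER_WIDE_INT - bitsize))
          >> (HOST_BITS_PER_WIDE_INT - bitsize));
}
#endif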
4813 \f
4814 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4815 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4816 ARRAY_REFs and find the ultimate containing object, which we return.
4817
4818 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4819 bit position, and *PUNSIGNEDP to the signedness of the field.
4820 If the position of the field is variable, we store a tree
4821 giving the variable offset (in units) in *POFFSET.
4822 This offset is in addition to the bit position.
4823 If the position is not variable, we store 0 in *POFFSET.
4824 We set *PALIGNMENT to the alignment in bytes of the address that will be
4825 computed. This is the alignment of the thing we return if *POFFSET
4826 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4827
4828 If any of the extraction expressions is volatile,
4829 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4830
4831 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4832 is a mode that can be used to access the field. In that case, *PBITSIZE
4833 is redundant.
4834
4835 If the field describes a variable-sized object, *PMODE is set to
4836 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4837 this case, but the address of the object can be found. */
4838
4839 tree
4840 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4841 punsignedp, pvolatilep, palignment)
4842 tree exp;
4843 int *pbitsize;
4844 int *pbitpos;
4845 tree *poffset;
4846 enum machine_mode *pmode;
4847 int *punsignedp;
4848 int *pvolatilep;
4849 int *palignment;
4850 {
4851 tree orig_exp = exp;
4852 tree size_tree = 0;
4853 enum machine_mode mode = VOIDmode;
4854 tree offset = integer_zero_node;
4855 unsigned int alignment = BIGGEST_ALIGNMENT;
4856
4857 if (TREE_CODE (exp) == COMPONENT_REF)
4858 {
4859 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4860 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4861 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4862 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4863 }
4864 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4865 {
4866 size_tree = TREE_OPERAND (exp, 1);
4867 *punsignedp = TREE_UNSIGNED (exp);
4868 }
4869 else
4870 {
4871 mode = TYPE_MODE (TREE_TYPE (exp));
4872 if (mode == BLKmode)
4873 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4874
4875 *pbitsize = GET_MODE_BITSIZE (mode);
4876 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4877 }
4878
4879 if (size_tree)
4880 {
4881 if (TREE_CODE (size_tree) != INTEGER_CST)
4882 mode = BLKmode, *pbitsize = -1;
4883 else
4884 *pbitsize = TREE_INT_CST_LOW (size_tree);
4885 }
4886
4887 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4888 and find the ultimate containing object. */
4889
4890 *pbitpos = 0;
4891
4892 while (1)
4893 {
4894 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4895 {
4896 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4897 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4898 : TREE_OPERAND (exp, 2));
4899 tree constant = integer_zero_node, var = pos;
4900
4901 /* If this field hasn't been filled in yet, don't go
4902 past it. This should only happen when folding expressions
4903 made during type construction. */
4904 if (pos == 0)
4905 break;
4906
4907 /* Assume here that the offset is a multiple of a unit.
4908 If not, there should be an explicitly added constant. */
4909 if (TREE_CODE (pos) == PLUS_EXPR
4910 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4911 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4912 else if (TREE_CODE (pos) == INTEGER_CST)
4913 constant = pos, var = integer_zero_node;
4914
4915 *pbitpos += TREE_INT_CST_LOW (constant);
4916 offset = size_binop (PLUS_EXPR, offset,
4917 size_binop (EXACT_DIV_EXPR, var,
4918 size_int (BITS_PER_UNIT)));
4919 }
4920
4921 else if (TREE_CODE (exp) == ARRAY_REF)
4922 {
4923 /* This code is based on the code in case ARRAY_REF in expand_expr
4924 below. We assume here that the size of an array element is
4925 always an integral multiple of BITS_PER_UNIT. */
4926
4927 tree index = TREE_OPERAND (exp, 1);
4928 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4929 tree low_bound
4930 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4931 tree index_type = TREE_TYPE (index);
4932 tree xindex;
4933
4934 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4935 {
4936 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4937 index);
4938 index_type = TREE_TYPE (index);
4939 }
4940
4941 /* Optimize the special-case of a zero lower bound.
4942
4943 We convert the low_bound to sizetype to avoid some problems
4944 with constant folding. (E.g. suppose the lower bound is 1,
4945 and its mode is QI. Without the conversion, (ARRAY
4946 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4947 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4948
4949 But sizetype isn't quite right either (especially if
4950 the lowbound is negative). FIXME */
4951
4952 if (! integer_zerop (low_bound))
4953 index = fold (build (MINUS_EXPR, index_type, index,
4954 convert (sizetype, low_bound)));
4955
4956 if (TREE_CODE (index) == INTEGER_CST)
4957 {
4958 index = convert (sbitsizetype, index);
4959 index_type = TREE_TYPE (index);
4960 }
4961
4962 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4963 convert (sbitsizetype,
4964 TYPE_SIZE (TREE_TYPE (exp)))));
4965
4966 if (TREE_CODE (xindex) == INTEGER_CST
4967 && TREE_INT_CST_HIGH (xindex) == 0)
4968 *pbitpos += TREE_INT_CST_LOW (xindex);
4969 else
4970 {
4971 /* Either the bit offset calculated above is not constant, or
4972 it overflowed. In either case, redo the multiplication
4973 against the size in units. This is especially important
4974 in the non-constant case to avoid a division at runtime. */
4975 xindex = fold (build (MULT_EXPR, ssizetype, index,
4976 convert (ssizetype,
4977 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4978
4979 if (contains_placeholder_p (xindex))
4980 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4981
4982 offset = size_binop (PLUS_EXPR, offset, xindex);
4983 }
4984 }
4985 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4986 && ! ((TREE_CODE (exp) == NOP_EXPR
4987 || TREE_CODE (exp) == CONVERT_EXPR)
4988 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4989 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4990 != UNION_TYPE))
4991 && (TYPE_MODE (TREE_TYPE (exp))
4992 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4993 break;
4994
4995 /* If any reference in the chain is volatile, the effect is volatile. */
4996 if (TREE_THIS_VOLATILE (exp))
4997 *pvolatilep = 1;
4998
4999 /* If the offset is non-constant already, then we can't assume any
5000 alignment more than the alignment here. */
5001 if (! integer_zerop (offset))
5002 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5003
5004 exp = TREE_OPERAND (exp, 0);
5005 }
5006
5007 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5008 alignment = MIN (alignment, DECL_ALIGN (exp));
5009 else if (TREE_TYPE (exp) != 0)
5010 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5011
5012 if (integer_zerop (offset))
5013 offset = 0;
5014
5015 if (offset != 0 && contains_placeholder_p (offset))
5016 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5017
5018 *pmode = mode;
5019 *poffset = offset;
5020 *palignment = alignment / BITS_PER_UNIT;
5021 return exp;
5022 }
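/* Illustrative sketch (not part of the compiler): a typical caller of
   get_inner_reference looks roughly like the hypothetical fragment below;
   real callers appear in expand_assignment and expand_expr.  */
#if 0
static void
get_inner_reference_usage_example (exp)
     tree exp;
{
  int bitsize, bitpos, unsignedp, volatilep = 0, alignment;
  enum machine_mode mode;
  tree offset;
  tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                                    &unsignedp, &volatilep, &alignment);

  /* INNER is the ultimate containing object.  The field starts BITPOS
     bits into it, plus OFFSET units if OFFSET is nonzero; ALIGNMENT is
     in bytes.  */
}
#endif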
5023
5024 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5025 static enum memory_use_mode
5026 get_memory_usage_from_modifier (modifier)
5027 enum expand_modifier modifier;
5028 {
5029 switch (modifier)
5030 {
5031 case EXPAND_NORMAL:
5032 case EXPAND_SUM:
5033 return MEMORY_USE_RO;
5034 break;
5035 case EXPAND_MEMORY_USE_WO:
5036 return MEMORY_USE_WO;
5037 break;
5038 case EXPAND_MEMORY_USE_RW:
5039 return MEMORY_USE_RW;
5040 break;
5041 case EXPAND_MEMORY_USE_DONT:
5042 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5043 MEMORY_USE_DONT, because they are modifiers to a call of
5044 expand_expr in the ADDR_EXPR case of expand_expr. */
5045 case EXPAND_CONST_ADDRESS:
5046 case EXPAND_INITIALIZER:
5047 return MEMORY_USE_DONT;
5048 case EXPAND_MEMORY_USE_BAD:
5049 default:
5050 abort ();
5051 }
5052 }
5053 \f
5054 /* Given an rtx VALUE that may contain additions and multiplications,
5055 return an equivalent value that just refers to a register or memory.
5056 This is done by generating instructions to perform the arithmetic
5057 and returning a pseudo-register containing the value.
5058
5059 The returned value may be a REG, SUBREG, MEM or constant. */
5060
5061 rtx
5062 force_operand (value, target)
5063 rtx value, target;
5064 {
5065 register optab binoptab = 0;
5066 /* Use a temporary to force order of execution of calls to
5067 `force_operand'. */
5068 rtx tmp;
5069 register rtx op2;
5070 /* Use subtarget as the target for operand 0 of a binary operation. */
5071 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5072
5073 /* Check for a PIC address load. */
5074 if (flag_pic
5075 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5076 && XEXP (value, 0) == pic_offset_table_rtx
5077 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5078 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5079 || GET_CODE (XEXP (value, 1)) == CONST))
5080 {
5081 if (!subtarget)
5082 subtarget = gen_reg_rtx (GET_MODE (value));
5083 emit_move_insn (subtarget, value);
5084 return subtarget;
5085 }
5086
5087 if (GET_CODE (value) == PLUS)
5088 binoptab = add_optab;
5089 else if (GET_CODE (value) == MINUS)
5090 binoptab = sub_optab;
5091 else if (GET_CODE (value) == MULT)
5092 {
5093 op2 = XEXP (value, 1);
5094 if (!CONSTANT_P (op2)
5095 && !(GET_CODE (op2) == REG && op2 != subtarget))
5096 subtarget = 0;
5097 tmp = force_operand (XEXP (value, 0), subtarget);
5098 return expand_mult (GET_MODE (value), tmp,
5099 force_operand (op2, NULL_RTX),
5100 target, 0);
5101 }
5102
5103 if (binoptab)
5104 {
5105 op2 = XEXP (value, 1);
5106 if (!CONSTANT_P (op2)
5107 && !(GET_CODE (op2) == REG && op2 != subtarget))
5108 subtarget = 0;
5109 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5110 {
5111 binoptab = add_optab;
5112 op2 = negate_rtx (GET_MODE (value), op2);
5113 }
5114
5115 /* Check for an addition with OP2 a constant integer and our first
5116 operand a PLUS of a virtual register and something else. In that
5117 case, we want to emit the sum of the virtual register and the
5118 constant first and then add the other value. This allows virtual
5119 register instantiation to simply modify the constant rather than
5120 creating another one around this addition. */
5121 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5122 && GET_CODE (XEXP (value, 0)) == PLUS
5123 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5124 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5125 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5126 {
5127 rtx temp = expand_binop (GET_MODE (value), binoptab,
5128 XEXP (XEXP (value, 0), 0), op2,
5129 subtarget, 0, OPTAB_LIB_WIDEN);
5130 return expand_binop (GET_MODE (value), binoptab, temp,
5131 force_operand (XEXP (XEXP (value, 0), 1), 0),
5132 target, 0, OPTAB_LIB_WIDEN);
5133 }
5134
5135 tmp = force_operand (XEXP (value, 0), subtarget);
5136 return expand_binop (GET_MODE (value), binoptab, tmp,
5137 force_operand (op2, NULL_RTX),
5138 target, 0, OPTAB_LIB_WIDEN);
5139 /* We give UNSIGNEDP = 0 to expand_binop
5140 because the only operations we are expanding here are signed ones. */
5141 }
5142 return value;
5143 }
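/* Illustrative sketch (not part of the compiler): force_operand reduces an
   arithmetic address rtx to something directly usable.  The fragment below
   is hypothetical; a Pmode PLUS is the typical input shape.  */
#if 0
static rtx
force_operand_usage_example (reg)
     rtx reg;
{
  rtx sum = gen_rtx_PLUS (Pmode, reg, GEN_INT (4));

  /* Returns a REG, MEM, or constant; here it typically emits an add and
     returns a fresh pseudo holding the sum.  */
  return force_operand (sum, NULL_RTX);
}
#endif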
5144 \f
5145 /* Subroutine of expand_expr:
5146 save the non-copied parts (LIST) of an expr (LHS), and return a list
5147 which can restore these values to their previous values,
5148 should something modify their storage. */
5149
5150 static tree
5151 save_noncopied_parts (lhs, list)
5152 tree lhs;
5153 tree list;
5154 {
5155 tree tail;
5156 tree parts = 0;
5157
5158 for (tail = list; tail; tail = TREE_CHAIN (tail))
5159 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5160 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5161 else
5162 {
5163 tree part = TREE_VALUE (tail);
5164 tree part_type = TREE_TYPE (part);
5165 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5166 rtx target = assign_temp (part_type, 0, 1, 1);
5167 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5168 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5169 parts = tree_cons (to_be_saved,
5170 build (RTL_EXPR, part_type, NULL_TREE,
5171 (tree) target),
5172 parts);
5173 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5174 }
5175 return parts;
5176 }
5177
5178 /* Subroutine of expand_expr:
5179 record the non-copied parts (LIST) of an expr (LHS), and return a list
5180 which specifies the initial values of these parts. */
5181
5182 static tree
5183 init_noncopied_parts (lhs, list)
5184 tree lhs;
5185 tree list;
5186 {
5187 tree tail;
5188 tree parts = 0;
5189
5190 for (tail = list; tail; tail = TREE_CHAIN (tail))
5191 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5192 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5193 else if (TREE_PURPOSE (tail))
5194 {
5195 tree part = TREE_VALUE (tail);
5196 tree part_type = TREE_TYPE (part);
5197 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5198 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5199 }
5200 return parts;
5201 }
5202
5203 /* Subroutine of expand_expr: return nonzero iff there is no way that
5204 EXP can reference X, which is being modified. TOP_P is nonzero if this
5205 call is going to be used to determine whether we need a temporary
5206 for EXP, as opposed to a recursive call to this function.
5207
5208 It is always safe for this routine to return zero since it merely
5209 searches for optimization opportunities. */
5210
5211 static int
5212 safe_from_p (x, exp, top_p)
5213 rtx x;
5214 tree exp;
5215 int top_p;
5216 {
5217 rtx exp_rtl = 0;
5218 int i, nops;
5219 static int save_expr_count;
5220 static int save_expr_size = 0;
5221 static tree *save_expr_rewritten;
5222 static tree save_expr_trees[256];
5223
5224 if (x == 0
5225 /* If EXP has varying size, we MUST use a target since we currently
5226 have no way of allocating temporaries of variable size
5227 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5228 So we assume here that something at a higher level has prevented a
5229 clash. This is somewhat bogus, but the best we can do. Only
5230 do this when X is BLKmode and when we are at the top level. */
5231 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5232 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5233 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5234 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5235 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5236 != INTEGER_CST)
5237 && GET_MODE (x) == BLKmode))
5238 return 1;
5239
5240 if (top_p && save_expr_size == 0)
5241 {
5242 int rtn;
5243
5244 save_expr_count = 0;
5245 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5246 save_expr_rewritten = &save_expr_trees[0];
5247
5248 rtn = safe_from_p (x, exp, 1);
5249
5250 for (i = 0; i < save_expr_count; ++i)
5251 {
5252 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5253 abort ();
5254 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5255 }
5256
5257 save_expr_size = 0;
5258
5259 return rtn;
5260 }
5261
5262 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5263 find the underlying pseudo. */
5264 if (GET_CODE (x) == SUBREG)
5265 {
5266 x = SUBREG_REG (x);
5267 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5268 return 0;
5269 }
5270
5271 /* If X is a location in the outgoing argument area, it is always safe. */
5272 if (GET_CODE (x) == MEM
5273 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5274 || (GET_CODE (XEXP (x, 0)) == PLUS
5275 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5276 return 1;
5277
5278 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5279 {
5280 case 'd':
5281 exp_rtl = DECL_RTL (exp);
5282 break;
5283
5284 case 'c':
5285 return 1;
5286
5287 case 'x':
5288 if (TREE_CODE (exp) == TREE_LIST)
5289 return ((TREE_VALUE (exp) == 0
5290 || safe_from_p (x, TREE_VALUE (exp), 0))
5291 && (TREE_CHAIN (exp) == 0
5292 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5293 else if (TREE_CODE (exp) == ERROR_MARK)
5294 return 1; /* An already-visited SAVE_EXPR? */
5295 else
5296 return 0;
5297
5298 case '1':
5299 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5300
5301 case '2':
5302 case '<':
5303 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5304 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5305
5306 case 'e':
5307 case 'r':
5308 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5309 the expression. If it is set, we conflict iff we are that rtx or
5310 both are in memory. Otherwise, we check all operands of the
5311 expression recursively. */
5312
5313 switch (TREE_CODE (exp))
5314 {
5315 case ADDR_EXPR:
5316 return (staticp (TREE_OPERAND (exp, 0))
5317 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5318 || TREE_STATIC (exp));
5319
5320 case INDIRECT_REF:
5321 if (GET_CODE (x) == MEM)
5322 return 0;
5323 break;
5324
5325 case CALL_EXPR:
5326 exp_rtl = CALL_EXPR_RTL (exp);
5327 if (exp_rtl == 0)
5328 {
5329 /* Assume that the call will clobber all hard registers and
5330 all of memory. */
5331 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5332 || GET_CODE (x) == MEM)
5333 return 0;
5334 }
5335
5336 break;
5337
5338 case RTL_EXPR:
5339 /* If a sequence exists, we would have to scan every instruction
5340 in the sequence to see if it was safe. This is probably not
5341 worthwhile. */
5342 if (RTL_EXPR_SEQUENCE (exp))
5343 return 0;
5344
5345 exp_rtl = RTL_EXPR_RTL (exp);
5346 break;
5347
5348 case WITH_CLEANUP_EXPR:
5349 exp_rtl = RTL_EXPR_RTL (exp);
5350 break;
5351
5352 case CLEANUP_POINT_EXPR:
5353 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5354
5355 case SAVE_EXPR:
5356 exp_rtl = SAVE_EXPR_RTL (exp);
5357 if (exp_rtl)
5358 break;
5359
5360 /* This SAVE_EXPR might appear many times in the top-level
5361 safe_from_p() expression, and if it has a complex
5362 subexpression, examining it multiple times could result
5363 in a combinatorial explosion. E.g. on an Alpha
5364 running at least 200MHz, a Fortran test case compiled with
5365 optimization took about 28 minutes to compile -- even though
5366 it was only a few lines long, and the complicated line causing
5367 so much time to be spent in the earlier version of safe_from_p()
5368 had only 293 or so unique nodes.
5369
5370 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5371 where it is so we can turn it back into a SAVE_EXPR in the
5372 top-level safe_from_p() when we're done. */
5373
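/* In other words, once this node's code has been changed to ERROR_MARK,
   any other path in the top-level walk that reaches the same shared
   SAVE_EXPR hits the 'x' class case above and treats it as already safe,
   so each shared SAVE_EXPR is examined at most once.  */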
5374 /* For now, don't bother re-sizing the array. */
5375 if (save_expr_count >= save_expr_size)
5376 return 0;
5377 save_expr_rewritten[save_expr_count++] = exp;
5378
5379 nops = tree_code_length[(int) SAVE_EXPR];
5380 for (i = 0; i < nops; i++)
5381 {
5382 tree operand = TREE_OPERAND (exp, i);
5383 if (operand == NULL_TREE)
5384 continue;
5385 TREE_SET_CODE (exp, ERROR_MARK);
5386 if (!safe_from_p (x, operand, 0))
5387 return 0;
5388 TREE_SET_CODE (exp, SAVE_EXPR);
5389 }
5390 TREE_SET_CODE (exp, ERROR_MARK);
5391 return 1;
5392
5393 case BIND_EXPR:
5394 /* The only operand we look at is operand 1. The rest aren't
5395 part of the expression. */
5396 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5397
5398 case METHOD_CALL_EXPR:
5399 /* This takes an rtx argument, but shouldn't appear here. */
5400 abort ();
5401
5402 default:
5403 break;
5404 }
5405
5406 /* If we have an rtx, we do not need to scan our operands. */
5407 if (exp_rtl)
5408 break;
5409
5410 nops = tree_code_length[(int) TREE_CODE (exp)];
5411 for (i = 0; i < nops; i++)
5412 if (TREE_OPERAND (exp, i) != 0
5413 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5414 return 0;
5415 }
5416
5417 /* If we have an rtl, find any enclosed object. Then see if we conflict
5418 with it. */
5419 if (exp_rtl)
5420 {
5421 if (GET_CODE (exp_rtl) == SUBREG)
5422 {
5423 exp_rtl = SUBREG_REG (exp_rtl);
5424 if (GET_CODE (exp_rtl) == REG
5425 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5426 return 0;
5427 }
5428
5429 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5430 are memory and EXP is not readonly. */
5431 return ! (rtx_equal_p (x, exp_rtl)
5432 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5433 && ! TREE_READONLY (exp)));
5434 }
5435
5436 /* If we reach here, it is safe. */
5437 return 1;
5438 }
5439
5440 /* Subroutine of expand_expr: return nonzero iff EXP is an
5441 expression whose type is statically determinable. */
5442
5443 static int
5444 fixed_type_p (exp)
5445 tree exp;
5446 {
5447 if (TREE_CODE (exp) == PARM_DECL
5448 || TREE_CODE (exp) == VAR_DECL
5449 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5450 || TREE_CODE (exp) == COMPONENT_REF
5451 || TREE_CODE (exp) == ARRAY_REF)
5452 return 1;
5453 return 0;
5454 }
5455
5456 /* Subroutine of expand_expr: return rtx if EXP is a
5457 variable or parameter; else return 0. */
5458
5459 static rtx
5460 var_rtx (exp)
5461 tree exp;
5462 {
5463 STRIP_NOPS (exp);
5464 switch (TREE_CODE (exp))
5465 {
5466 case PARM_DECL:
5467 case VAR_DECL:
5468 return DECL_RTL (exp);
5469 default:
5470 return 0;
5471 }
5472 }
5473
5474 #ifdef MAX_INTEGER_COMPUTATION_MODE
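/* Verify that EXP and its operands do not involve an integer mode wider
   than MAX_INTEGER_COMPUTATION_MODE, and report a fatal error if they do.
   For instance, a target that defined MAX_INTEGER_COMPUTATION_MODE as
   SImode would be rejecting DImode arithmetic here.  */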
5475 void
5476 check_max_integer_computation_mode (exp)
5477 tree exp;
5478 {
5479 enum tree_code code;
5480 enum machine_mode mode;
5481
5482 /* Strip any NOPs that don't change the mode. */
5483 STRIP_NOPS (exp);
5484 code = TREE_CODE (exp);
5485
5486 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5487 if (code == NOP_EXPR
5488 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5489 return;
5490
5491 /* First check the type of the overall operation. We need only look at
5492 unary, binary and relational operations. */
5493 if (TREE_CODE_CLASS (code) == '1'
5494 || TREE_CODE_CLASS (code) == '2'
5495 || TREE_CODE_CLASS (code) == '<')
5496 {
5497 mode = TYPE_MODE (TREE_TYPE (exp));
5498 if (GET_MODE_CLASS (mode) == MODE_INT
5499 && mode > MAX_INTEGER_COMPUTATION_MODE)
5500 fatal ("unsupported wide integer operation");
5501 }
5502
5503 /* Check operand of a unary op. */
5504 if (TREE_CODE_CLASS (code) == '1')
5505 {
5506 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5507 if (GET_MODE_CLASS (mode) == MODE_INT
5508 && mode > MAX_INTEGER_COMPUTATION_MODE)
5509 fatal ("unsupported wide integer operation");
5510 }
5511
5512 /* Check operands of a binary/comparison op. */
5513 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5514 {
5515 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5516 if (GET_MODE_CLASS (mode) == MODE_INT
5517 && mode > MAX_INTEGER_COMPUTATION_MODE)
5518 fatal ("unsupported wide integer operation");
5519
5520 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5521 if (GET_MODE_CLASS (mode) == MODE_INT
5522 && mode > MAX_INTEGER_COMPUTATION_MODE)
5523 fatal ("unsupported wide integer operation");
5524 }
5525 }
5526 #endif
5527
5528 \f
5529 /* expand_expr: generate code for computing expression EXP.
5530 An rtx for the computed value is returned. The value is never null.
5531 In the case of a void EXP, const0_rtx is returned.
5532
5533 The value may be stored in TARGET if TARGET is nonzero.
5534 TARGET is just a suggestion; callers must assume that
5535 the rtx returned may not be the same as TARGET.
5536
5537 If TARGET is CONST0_RTX, it means that the value will be ignored.
5538
5539 If TMODE is not VOIDmode, it suggests generating the
5540 result in mode TMODE. But this is done only when convenient.
5541 Otherwise, TMODE is ignored and the value generated in its natural mode.
5542 TMODE is just a suggestion; callers must assume that
5543 the rtx returned may not have mode TMODE.
5544
5545 Note that TARGET may have neither TMODE nor MODE. In that case, it
5546 probably will not be used.
5547
5548 If MODIFIER is EXPAND_SUM then when EXP is an addition
5549 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5550 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5551 products as above, or REG or MEM, or constant.
5552 Ordinarily in such cases we would output mul or add instructions
5553 and then return a pseudo reg containing the sum.
5554
5555 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5556 it also marks a label as absolutely required (it can't be dead).
5557 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5558 This is used for outputting expressions used in initializers.
5559
5560 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5561 with a constant address even if that address is not normally legitimate.
5562 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
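/* As an illustration, a caller with no preferences typically does

     rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   and must then cope with whatever rtx comes back, copying it into a
   register of its own choosing if it needs one.  */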
5563
5564 rtx
5565 expand_expr (exp, target, tmode, modifier)
5566 register tree exp;
5567 rtx target;
5568 enum machine_mode tmode;
5569 enum expand_modifier modifier;
5570 {
5571 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5572 This is static so it will be accessible to our recursive callees. */
5573 static tree placeholder_list = 0;
5574 register rtx op0, op1, temp;
5575 tree type = TREE_TYPE (exp);
5576 int unsignedp = TREE_UNSIGNED (type);
5577 register enum machine_mode mode;
5578 register enum tree_code code = TREE_CODE (exp);
5579 optab this_optab;
5580 rtx subtarget, original_target;
5581 int ignore;
5582 tree context;
5583 /* Used by check-memory-usage to make modifier read only. */
5584 enum expand_modifier ro_modifier;
5585
5586 /* Handle ERROR_MARK before anybody tries to access its type. */
5587 if (TREE_CODE (exp) == ERROR_MARK)
5588 {
5589 op0 = CONST0_RTX (tmode);
5590 if (op0 != 0)
5591 return op0;
5592 return const0_rtx;
5593 }
5594
5595 mode = TYPE_MODE (type);
5596 /* Use subtarget as the target for operand 0 of a binary operation. */
5597 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5598 original_target = target;
5599 ignore = (target == const0_rtx
5600 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5601 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5602 || code == COND_EXPR)
5603 && TREE_CODE (type) == VOID_TYPE));
5604
5605 /* Make a read-only version of the modifier. */
5606 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5607 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5608 ro_modifier = modifier;
5609 else
5610 ro_modifier = EXPAND_NORMAL;
5611
5612 /* Don't use hard regs as subtargets, because the combiner
5613 can only handle pseudo regs. */
5614 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5615 subtarget = 0;
5616 /* Avoid subtargets inside loops,
5617 since they hide some invariant expressions. */
5618 if (preserve_subexpressions_p ())
5619 subtarget = 0;
5620
5621 /* If we are going to ignore this result, we need only do something
5622 if there is a side-effect somewhere in the expression. If there
5623 is, short-circuit the most common cases here. Note that we must
5624 not call expand_expr with anything but const0_rtx in case this
5625 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5626
5627 if (ignore)
5628 {
5629 if (! TREE_SIDE_EFFECTS (exp))
5630 return const0_rtx;
5631
5632 /* Ensure we reference a volatile object even if value is ignored. */
5633 if (TREE_THIS_VOLATILE (exp)
5634 && TREE_CODE (exp) != FUNCTION_DECL
5635 && mode != VOIDmode && mode != BLKmode)
5636 {
5637 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5638 if (GET_CODE (temp) == MEM)
5639 temp = copy_to_reg (temp);
5640 return const0_rtx;
5641 }
5642
5643 if (TREE_CODE_CLASS (code) == '1')
5644 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5645 VOIDmode, ro_modifier);
5646 else if (TREE_CODE_CLASS (code) == '2'
5647 || TREE_CODE_CLASS (code) == '<')
5648 {
5649 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5650 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5651 return const0_rtx;
5652 }
5653 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5654 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5655 /* If the second operand has no side effects, just evaluate
5656 the first. */
5657 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5658 VOIDmode, ro_modifier);
5659
5660 target = 0;
5661 }
5662
5663 #ifdef MAX_INTEGER_COMPUTATION_MODE
5664 /* Only check stuff here if the mode we want is different from the mode
5665 of the expression; if it's the same, check_max_integer_computation_mode
5666 will handle it. Do we really need to check this stuff at all? */
5667
5668 if (target
5669 && GET_MODE (target) != mode
5670 && TREE_CODE (exp) != INTEGER_CST
5671 && TREE_CODE (exp) != PARM_DECL
5672 && TREE_CODE (exp) != ARRAY_REF
5673 && TREE_CODE (exp) != COMPONENT_REF
5674 && TREE_CODE (exp) != BIT_FIELD_REF
5675 && TREE_CODE (exp) != INDIRECT_REF
5676 && TREE_CODE (exp) != CALL_EXPR
5677 && TREE_CODE (exp) != VAR_DECL
5678 && TREE_CODE (exp) != RTL_EXPR)
5679 {
5680 enum machine_mode mode = GET_MODE (target);
5681
5682 if (GET_MODE_CLASS (mode) == MODE_INT
5683 && mode > MAX_INTEGER_COMPUTATION_MODE)
5684 fatal ("unsupported wide integer operation");
5685 }
5686
5687 if (tmode != mode
5688 && TREE_CODE (exp) != INTEGER_CST
5689 && TREE_CODE (exp) != PARM_DECL
5690 && TREE_CODE (exp) != ARRAY_REF
5691 && TREE_CODE (exp) != COMPONENT_REF
5692 && TREE_CODE (exp) != BIT_FIELD_REF
5693 && TREE_CODE (exp) != INDIRECT_REF
5694 && TREE_CODE (exp) != VAR_DECL
5695 && TREE_CODE (exp) != CALL_EXPR
5696 && TREE_CODE (exp) != RTL_EXPR
5697 && GET_MODE_CLASS (tmode) == MODE_INT
5698 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5699 fatal ("unsupported wide integer operation");
5700
5701 check_max_integer_computation_mode (exp);
5702 #endif
5703
5704 /* If we will do cse, generate all results into pseudo registers
5705 since 1) that allows cse to find more things
5706 and 2) otherwise cse could produce an insn the machine
5707 cannot support. */
5708
5709 if (! cse_not_expected && mode != BLKmode && target
5710 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5711 target = subtarget;
5712
5713 switch (code)
5714 {
5715 case LABEL_DECL:
5716 {
5717 tree function = decl_function_context (exp);
5718 /* Handle using a label in a containing function. */
5719 if (function != current_function_decl
5720 && function != inline_function_decl && function != 0)
5721 {
5722 struct function *p = find_function_data (function);
5723 /* Allocate in the memory associated with the function
5724 that the label is in. */
5725 push_obstacks (p->function_obstack,
5726 p->function_maybepermanent_obstack);
5727
5728 p->expr->x_forced_labels
5729 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5730 p->expr->x_forced_labels);
5731 pop_obstacks ();
5732 }
5733 else
5734 {
5735 if (modifier == EXPAND_INITIALIZER)
5736 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5737 label_rtx (exp),
5738 forced_labels);
5739 }
5740
5741 temp = gen_rtx_MEM (FUNCTION_MODE,
5742 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5743 if (function != current_function_decl
5744 && function != inline_function_decl && function != 0)
5745 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5746 return temp;
5747 }
5748
5749 case PARM_DECL:
5750 if (DECL_RTL (exp) == 0)
5751 {
5752 error_with_decl (exp, "prior parameter's size depends on `%s'");
5753 return CONST0_RTX (mode);
5754 }
5755
5756 /* ... fall through ... */
5757
5758 case VAR_DECL:
5759 /* If a static var's type was incomplete when the decl was written,
5760 but the type is complete now, lay out the decl now. */
5761 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5762 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5763 {
5764 push_obstacks_nochange ();
5765 end_temporary_allocation ();
5766 layout_decl (exp, 0);
5767 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5768 pop_obstacks ();
5769 }
5770
5771 /* Although static-storage variables start off initialized, according to
5772 ANSI C, a memcpy could overwrite them with uninitialized values. So
5773 we check them too. This also lets us check for read-only variables
5774 accessed via a non-const declaration, in case it won't be detected
5775 any other way (e.g., in an embedded system or OS kernel without
5776 memory protection).
5777
5778 Aggregates are not checked here; they're handled elsewhere. */
5779 if (current_function && current_function_check_memory_usage
5780 && code == VAR_DECL
5781 && GET_CODE (DECL_RTL (exp)) == MEM
5782 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5783 {
5784 enum memory_use_mode memory_usage;
5785 memory_usage = get_memory_usage_from_modifier (modifier);
5786
5787 if (memory_usage != MEMORY_USE_DONT)
5788 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5789 XEXP (DECL_RTL (exp), 0), Pmode,
5790 GEN_INT (int_size_in_bytes (type)),
5791 TYPE_MODE (sizetype),
5792 GEN_INT (memory_usage),
5793 TYPE_MODE (integer_type_node));
5794 }
5795
5796 /* ... fall through ... */
5797
5798 case FUNCTION_DECL:
5799 case RESULT_DECL:
5800 if (DECL_RTL (exp) == 0)
5801 abort ();
5802
5803 /* Ensure the variable is marked as used even if it doesn't go through
5804 a parser. If it hasn't been used yet, write out an external
5805 definition. */
5806 if (! TREE_USED (exp))
5807 {
5808 assemble_external (exp);
5809 TREE_USED (exp) = 1;
5810 }
5811
5812 /* Show we haven't gotten RTL for this yet. */
5813 temp = 0;
5814
5815 /* Handle variables inherited from containing functions. */
5816 context = decl_function_context (exp);
5817
5818 /* We treat inline_function_decl as an alias for the current function
5819 because that is the inline function whose vars, types, etc.
5820 are being merged into the current function.
5821 See expand_inline_function. */
5822
5823 if (context != 0 && context != current_function_decl
5824 && context != inline_function_decl
5825 /* If var is static, we don't need a static chain to access it. */
5826 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5827 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5828 {
5829 rtx addr;
5830
5831 /* Mark as non-local and addressable. */
5832 DECL_NONLOCAL (exp) = 1;
5833 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5834 abort ();
5835 mark_addressable (exp);
5836 if (GET_CODE (DECL_RTL (exp)) != MEM)
5837 abort ();
5838 addr = XEXP (DECL_RTL (exp), 0);
5839 if (GET_CODE (addr) == MEM)
5840 addr = gen_rtx_MEM (Pmode,
5841 fix_lexical_addr (XEXP (addr, 0), exp));
5842 else
5843 addr = fix_lexical_addr (addr, exp);
5844 temp = change_address (DECL_RTL (exp), mode, addr);
5845 }
5846
5847 /* This is the case of an array whose size is to be determined
5848 from its initializer, while the initializer is still being parsed.
5849 See expand_decl. */
5850
5851 else if (GET_CODE (DECL_RTL (exp)) == MEM
5852 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5853 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5854 XEXP (DECL_RTL (exp), 0));
5855
5856 /* If DECL_RTL is memory, we are in the normal case and either
5857 the address is not valid or it is not a register and -fforce-addr
5858 is specified, get the address into a register. */
5859
5860 else if (GET_CODE (DECL_RTL (exp)) == MEM
5861 && modifier != EXPAND_CONST_ADDRESS
5862 && modifier != EXPAND_SUM
5863 && modifier != EXPAND_INITIALIZER
5864 && (! memory_address_p (DECL_MODE (exp),
5865 XEXP (DECL_RTL (exp), 0))
5866 || (flag_force_addr
5867 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5868 temp = change_address (DECL_RTL (exp), VOIDmode,
5869 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5870
5871 /* If we got something, return it. But first, set the alignment
5872 if the address is a register. */
5873 if (temp != 0)
5874 {
5875 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5876 mark_reg_pointer (XEXP (temp, 0),
5877 DECL_ALIGN (exp) / BITS_PER_UNIT);
5878
5879 return temp;
5880 }
5881
5882 /* If the mode of DECL_RTL does not match that of the decl, it
5883 must be a promoted value. We return a SUBREG of the wanted mode,
5884 but mark it so that we know that it was already extended. */
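/* For instance, on a target whose PROMOTE_MODE widens HImode locals to
   SImode, DECL_RTL is an SImode REG and what we hand back is
   (subreg:HI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set.  */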
5885
5886 if (GET_CODE (DECL_RTL (exp)) == REG
5887 && GET_MODE (DECL_RTL (exp)) != mode)
5888 {
5889 /* Get the signedness used for this variable. Ensure we get the
5890 same mode we got when the variable was declared. */
5891 if (GET_MODE (DECL_RTL (exp))
5892 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5893 abort ();
5894
5895 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5896 SUBREG_PROMOTED_VAR_P (temp) = 1;
5897 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5898 return temp;
5899 }
5900
5901 return DECL_RTL (exp);
5902
5903 case INTEGER_CST:
5904 return immed_double_const (TREE_INT_CST_LOW (exp),
5905 TREE_INT_CST_HIGH (exp),
5906 mode);
5907
5908 case CONST_DECL:
5909 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5910 EXPAND_MEMORY_USE_BAD);
5911
5912 case REAL_CST:
5913 /* If optimized, generate immediate CONST_DOUBLE
5914 which will be turned into memory by reload if necessary.
5915
5916 We used to force a register so that loop.c could see it. But
5917 this does not allow gen_* patterns to perform optimizations with
5918 the constants. It also produces two insns in cases like "x = 1.0;".
5919 On most machines, floating-point constants are not permitted in
5920 many insns, so we'd end up copying it to a register in any case.
5921
5922 Now, we do the copying in expand_binop, if appropriate. */
5923 return immed_real_const (exp);
5924
5925 case COMPLEX_CST:
5926 case STRING_CST:
5927 if (! TREE_CST_RTL (exp))
5928 output_constant_def (exp);
5929
5930 /* TREE_CST_RTL probably contains a constant address.
5931 On RISC machines where a constant address isn't valid,
5932 make some insns to get that address into a register. */
5933 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5934 && modifier != EXPAND_CONST_ADDRESS
5935 && modifier != EXPAND_INITIALIZER
5936 && modifier != EXPAND_SUM
5937 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5938 || (flag_force_addr
5939 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5940 return change_address (TREE_CST_RTL (exp), VOIDmode,
5941 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5942 return TREE_CST_RTL (exp);
5943
5944 case EXPR_WITH_FILE_LOCATION:
5945 {
5946 rtx to_return;
5947 char *saved_input_filename = input_filename;
5948 int saved_lineno = lineno;
5949 input_filename = EXPR_WFL_FILENAME (exp);
5950 lineno = EXPR_WFL_LINENO (exp);
5951 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5952 emit_line_note (input_filename, lineno);
5953 /* Possibly avoid switching back and forth here. */
5954 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5955 input_filename = saved_input_filename;
5956 lineno = saved_lineno;
5957 return to_return;
5958 }
5959
5960 case SAVE_EXPR:
5961 context = decl_function_context (exp);
5962
5963 /* If this SAVE_EXPR was at global context, assume we are an
5964 initialization function and move it into our context. */
5965 if (context == 0)
5966 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5967
5968 /* We treat inline_function_decl as an alias for the current function
5969 because that is the inline function whose vars, types, etc.
5970 are being merged into the current function.
5971 See expand_inline_function. */
5972 if (context == current_function_decl || context == inline_function_decl)
5973 context = 0;
5974
5975 /* If this is non-local, handle it. */
5976 if (context)
5977 {
5978 /* The following call just exists to abort if the context is
5979 not of a containing function. */
5980 find_function_data (context);
5981
5982 temp = SAVE_EXPR_RTL (exp);
5983 if (temp && GET_CODE (temp) == REG)
5984 {
5985 put_var_into_stack (exp);
5986 temp = SAVE_EXPR_RTL (exp);
5987 }
5988 if (temp == 0 || GET_CODE (temp) != MEM)
5989 abort ();
5990 return change_address (temp, mode,
5991 fix_lexical_addr (XEXP (temp, 0), exp));
5992 }
5993 if (SAVE_EXPR_RTL (exp) == 0)
5994 {
5995 if (mode == VOIDmode)
5996 temp = const0_rtx;
5997 else
5998 temp = assign_temp (type, 3, 0, 0);
5999
6000 SAVE_EXPR_RTL (exp) = temp;
6001 if (!optimize && GET_CODE (temp) == REG)
6002 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6003 save_expr_regs);
6004
6005 /* If the mode of TEMP does not match that of the expression, it
6006 must be a promoted value. We pass store_expr a SUBREG of the
6007 wanted mode but mark it so that we know that it was already
6008 extended. Note that `unsignedp' was modified above in
6009 this case. */
6010
6011 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6012 {
6013 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6014 SUBREG_PROMOTED_VAR_P (temp) = 1;
6015 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6016 }
6017
6018 if (temp == const0_rtx)
6019 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6020 EXPAND_MEMORY_USE_BAD);
6021 else
6022 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6023
6024 TREE_USED (exp) = 1;
6025 }
6026
6027 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6028 must be a promoted value. We return a SUBREG of the wanted mode,
6029 but mark it so that we know that it was already extended. */
6030
6031 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6032 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6033 {
6034 /* Compute the signedness and make the proper SUBREG. */
6035 promote_mode (type, mode, &unsignedp, 0);
6036 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6037 SUBREG_PROMOTED_VAR_P (temp) = 1;
6038 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6039 return temp;
6040 }
6041
6042 return SAVE_EXPR_RTL (exp);
6043
6044 case UNSAVE_EXPR:
6045 {
6046 rtx temp;
6047 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6048 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6049 return temp;
6050 }
6051
6052 case PLACEHOLDER_EXPR:
6053 {
6054 tree placeholder_expr;
6055
6056 /* If there is an object on the head of the placeholder list,
6057 see if some object in it is of type TYPE or a pointer to it. For
6058 further information, see tree.def. */
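/* (Background: PLACEHOLDER_EXPRs arise for self-referential types, e.g.
   an Ada record whose field positions depend on a discriminant; the
   enclosing WITH_RECORD_EXPR, handled below, supplies the object those
   placeholders should be read from.)  */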
6059 for (placeholder_expr = placeholder_list;
6060 placeholder_expr != 0;
6061 placeholder_expr = TREE_CHAIN (placeholder_expr))
6062 {
6063 tree need_type = TYPE_MAIN_VARIANT (type);
6064 tree object = 0;
6065 tree old_list = placeholder_list;
6066 tree elt;
6067
6068 /* Find the outermost reference that is of the type we want.
6069 If none, see if any object has a type that is a pointer to
6070 the type we want. */
6071 for (elt = TREE_PURPOSE (placeholder_expr);
6072 elt != 0 && object == 0;
6073 elt
6074 = ((TREE_CODE (elt) == COMPOUND_EXPR
6075 || TREE_CODE (elt) == COND_EXPR)
6076 ? TREE_OPERAND (elt, 1)
6077 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6078 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6079 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6080 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6081 ? TREE_OPERAND (elt, 0) : 0))
6082 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6083 object = elt;
6084
6085 for (elt = TREE_PURPOSE (placeholder_expr);
6086 elt != 0 && object == 0;
6087 elt
6088 = ((TREE_CODE (elt) == COMPOUND_EXPR
6089 || TREE_CODE (elt) == COND_EXPR)
6090 ? TREE_OPERAND (elt, 1)
6091 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6092 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6093 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6094 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6095 ? TREE_OPERAND (elt, 0) : 0))
6096 if (POINTER_TYPE_P (TREE_TYPE (elt))
6097 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6098 == need_type))
6099 object = build1 (INDIRECT_REF, need_type, elt);
6100
6101 if (object != 0)
6102 {
6103 /* Expand this object skipping the list entries before
6104 it was found in case it is also a PLACEHOLDER_EXPR.
6105 In that case, we want to translate it using subsequent
6106 entries. */
6107 placeholder_list = TREE_CHAIN (placeholder_expr);
6108 temp = expand_expr (object, original_target, tmode,
6109 ro_modifier);
6110 placeholder_list = old_list;
6111 return temp;
6112 }
6113 }
6114 }
6115
6116 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6117 abort ();
6118
6119 case WITH_RECORD_EXPR:
6120 /* Put the object on the placeholder list, expand our first operand,
6121 and pop the list. */
6122 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6123 placeholder_list);
6124 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6125 tmode, ro_modifier);
6126 placeholder_list = TREE_CHAIN (placeholder_list);
6127 return target;
6128
6129 case GOTO_EXPR:
6130 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6131 expand_goto (TREE_OPERAND (exp, 0));
6132 else
6133 expand_computed_goto (TREE_OPERAND (exp, 0));
6134 return const0_rtx;
6135
6136 case EXIT_EXPR:
6137 expand_exit_loop_if_false (NULL_PTR,
6138 invert_truthvalue (TREE_OPERAND (exp, 0)));
6139 return const0_rtx;
6140
6141 case LABELED_BLOCK_EXPR:
6142 if (LABELED_BLOCK_BODY (exp))
6143 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6144 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6145 return const0_rtx;
6146
6147 case EXIT_BLOCK_EXPR:
6148 if (EXIT_BLOCK_RETURN (exp))
6149 sorry ("returned value in block_exit_expr");
6150 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6151 return const0_rtx;
6152
6153 case LOOP_EXPR:
6154 push_temp_slots ();
6155 expand_start_loop (1);
6156 expand_expr_stmt (TREE_OPERAND (exp, 0));
6157 expand_end_loop ();
6158 pop_temp_slots ();
6159
6160 return const0_rtx;
6161
6162 case BIND_EXPR:
6163 {
6164 tree vars = TREE_OPERAND (exp, 0);
6165 int vars_need_expansion = 0;
6166
6167 /* Need to open a binding contour here because
6168 if there are any cleanups they must be contained here. */
6169 expand_start_bindings (2);
6170
6171 /* Mark the corresponding BLOCK for output in its proper place. */
6172 if (TREE_OPERAND (exp, 2) != 0
6173 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6174 insert_block (TREE_OPERAND (exp, 2));
6175
6176 /* If VARS have not yet been expanded, expand them now. */
6177 while (vars)
6178 {
6179 if (DECL_RTL (vars) == 0)
6180 {
6181 vars_need_expansion = 1;
6182 expand_decl (vars);
6183 }
6184 expand_decl_init (vars);
6185 vars = TREE_CHAIN (vars);
6186 }
6187
6188 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6189
6190 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6191
6192 return temp;
6193 }
6194
6195 case RTL_EXPR:
6196 if (RTL_EXPR_SEQUENCE (exp))
6197 {
6198 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6199 abort ();
6200 emit_insns (RTL_EXPR_SEQUENCE (exp));
6201 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6202 }
6203 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6204 free_temps_for_rtl_expr (exp);
6205 return RTL_EXPR_RTL (exp);
6206
6207 case CONSTRUCTOR:
6208 /* If we don't need the result, just ensure we evaluate any
6209 subexpressions. */
6210 if (ignore)
6211 {
6212 tree elt;
6213 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6214 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6215 EXPAND_MEMORY_USE_BAD);
6216 return const0_rtx;
6217 }
6218
6219 /* All elts simple constants => refer to a constant in memory. But
6220 if this is a non-BLKmode mode, let it store a field at a time
6221 since that should make a CONST_INT or CONST_DOUBLE when we
6222 fold. Likewise, if we have a target we can use, it is best to
6223 store directly into the target unless the type is large enough
6224 that memcpy will be used. If we are making an initializer and
6225 all operands are constant, put it in memory as well. */
6226 else if ((TREE_STATIC (exp)
6227 && ((mode == BLKmode
6228 && ! (target != 0 && safe_from_p (target, exp, 1)))
6229 || TREE_ADDRESSABLE (exp)
6230 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6231 && (!MOVE_BY_PIECES_P
6232 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6233 TYPE_ALIGN (type) / BITS_PER_UNIT))
6234 && ! mostly_zeros_p (exp))))
6235 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6236 {
6237 rtx constructor = output_constant_def (exp);
6238 if (modifier != EXPAND_CONST_ADDRESS
6239 && modifier != EXPAND_INITIALIZER
6240 && modifier != EXPAND_SUM
6241 && (! memory_address_p (GET_MODE (constructor),
6242 XEXP (constructor, 0))
6243 || (flag_force_addr
6244 && GET_CODE (XEXP (constructor, 0)) != REG)))
6245 constructor = change_address (constructor, VOIDmode,
6246 XEXP (constructor, 0));
6247 return constructor;
6248 }
6249
6250 else
6251 {
6252 /* Handle calls that pass values in multiple non-contiguous
6253 locations. The Irix 6 ABI has examples of this. */
6254 if (target == 0 || ! safe_from_p (target, exp, 1)
6255 || GET_CODE (target) == PARALLEL)
6256 {
6257 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6258 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6259 else
6260 target = assign_temp (type, 0, 1, 1);
6261 }
6262
6263 if (TREE_READONLY (exp))
6264 {
6265 if (GET_CODE (target) == MEM)
6266 target = copy_rtx (target);
6267
6268 RTX_UNCHANGING_P (target) = 1;
6269 }
6270
6271 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0);
6272 return target;
6273 }
6274
6275 case INDIRECT_REF:
6276 {
6277 tree exp1 = TREE_OPERAND (exp, 0);
6278 tree exp2;
6279 tree index;
6280 tree string = string_constant (exp1, &index);
6281 int i;
6282
6283 /* Try to optimize reads from const strings. */
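/* E.g. an INDIRECT_REF that resolves to "foobar" with index 2 is folded
   to the constant 'o' here, with no memory reference emitted.  */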
6284 if (string
6285 && TREE_CODE (string) == STRING_CST
6286 && TREE_CODE (index) == INTEGER_CST
6287 && !TREE_INT_CST_HIGH (index)
6288 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6289 && GET_MODE_CLASS (mode) == MODE_INT
6290 && GET_MODE_SIZE (mode) == 1
6291 && modifier != EXPAND_MEMORY_USE_WO)
6292 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6293
6294 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6295 op0 = memory_address (mode, op0);
6296
6297 if (current_function && current_function_check_memory_usage
6298 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6299 {
6300 enum memory_use_mode memory_usage;
6301 memory_usage = get_memory_usage_from_modifier (modifier);
6302
6303 if (memory_usage != MEMORY_USE_DONT)
6304 {
6305 in_check_memory_usage = 1;
6306 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6307 op0, Pmode,
6308 GEN_INT (int_size_in_bytes (type)),
6309 TYPE_MODE (sizetype),
6310 GEN_INT (memory_usage),
6311 TYPE_MODE (integer_type_node));
6312 in_check_memory_usage = 0;
6313 }
6314 }
6315
6316 temp = gen_rtx_MEM (mode, op0);
6317 /* If the address was computed by addition,
6318 mark this as an element of an aggregate. */
6319 if (TREE_CODE (exp1) == PLUS_EXPR
6320 || (TREE_CODE (exp1) == SAVE_EXPR
6321 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6322 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6323 || (TREE_CODE (exp1) == ADDR_EXPR
6324 && (exp2 = TREE_OPERAND (exp1, 0))
6325 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6326 MEM_SET_IN_STRUCT_P (temp, 1);
6327
6328 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6329 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6330
6331 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6332 here, because, in C and C++, the fact that a location is accessed
6333 through a pointer to const does not mean that the value there can
6334 never change. Languages where it can never change should
6335 also set TREE_STATIC. */
6336 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6337 return temp;
6338 }
6339
6340 case ARRAY_REF:
6341 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6342 abort ();
6343
6344 {
6345 tree array = TREE_OPERAND (exp, 0);
6346 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6347 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6348 tree index = TREE_OPERAND (exp, 1);
6349 tree index_type = TREE_TYPE (index);
6350 HOST_WIDE_INT i;
6351
6352 /* Optimize the special case of a zero lower bound.
6353
6354 We convert the low_bound to sizetype to avoid some problems
6355 with constant folding. (E.g. suppose the lower bound is 1,
6356 and its mode is QI. Without the conversion, (ARRAY
6357 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6358 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6359
6360 But sizetype isn't quite right either (especially if
6361 the low bound is negative). FIXME */
6362
6363 if (! integer_zerop (low_bound))
6364 index = fold (build (MINUS_EXPR, index_type, index,
6365 convert (sizetype, low_bound)));
6366
6367 /* Fold an expression like: "foo"[2].
6368 This is not done in fold so it won't happen inside &.
6369 Don't fold if this is for wide characters since it's too
6370 difficult to do correctly and this is a very rare case. */
6371
6372 if (TREE_CODE (array) == STRING_CST
6373 && TREE_CODE (index) == INTEGER_CST
6374 && !TREE_INT_CST_HIGH (index)
6375 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6376 && GET_MODE_CLASS (mode) == MODE_INT
6377 && GET_MODE_SIZE (mode) == 1)
6378 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6379
6380 /* If this is a constant index into a constant array,
6381 just get the value from the array. Handle both the cases when
6382 we have an explicit constructor and when our operand is a variable
6383 that was declared const. */
6384
6385 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6386 {
6387 if (TREE_CODE (index) == INTEGER_CST
6388 && TREE_INT_CST_HIGH (index) == 0)
6389 {
6390 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6391
6392 i = TREE_INT_CST_LOW (index);
6393 while (elem && i--)
6394 elem = TREE_CHAIN (elem);
6395 if (elem)
6396 return expand_expr (fold (TREE_VALUE (elem)), target,
6397 tmode, ro_modifier);
6398 }
6399 }
6400
6401 else if (optimize >= 1
6402 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6403 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6404 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6405 {
6406 if (TREE_CODE (index) == INTEGER_CST)
6407 {
6408 tree init = DECL_INITIAL (array);
6409
6410 i = TREE_INT_CST_LOW (index);
6411 if (TREE_CODE (init) == CONSTRUCTOR)
6412 {
6413 tree elem = CONSTRUCTOR_ELTS (init);
6414
6415 while (elem
6416 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6417 elem = TREE_CHAIN (elem);
6418 if (elem)
6419 return expand_expr (fold (TREE_VALUE (elem)), target,
6420 tmode, ro_modifier);
6421 }
6422 else if (TREE_CODE (init) == STRING_CST
6423 && TREE_INT_CST_HIGH (index) == 0
6424 && (TREE_INT_CST_LOW (index)
6425 < TREE_STRING_LENGTH (init)))
6426 return (GEN_INT
6427 (TREE_STRING_POINTER
6428 (init)[TREE_INT_CST_LOW (index)]));
6429 }
6430 }
6431 }
6432
6433 /* ... fall through ... */
6434
6435 case COMPONENT_REF:
6436 case BIT_FIELD_REF:
6437 /* If the operand is a CONSTRUCTOR, we can just extract the
6438 appropriate field if it is present. Don't do this if we have
6439 already written the data since we want to refer to that copy
6440 and varasm.c assumes that's what we'll do. */
6441 if (code != ARRAY_REF
6442 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6443 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6444 {
6445 tree elt;
6446
6447 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6448 elt = TREE_CHAIN (elt))
6449 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6450 /* We can normally use the value of the field in the
6451 CONSTRUCTOR. However, if this is a bitfield in
6452 an integral mode that we can fit in a HOST_WIDE_INT,
6453 we must mask only the number of bits in the bitfield,
6454 since this is done implicitly by the constructor. If
6455 the bitfield does not meet either of those conditions,
6456 we can't do this optimization. */
6457 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6458 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6459 == MODE_INT)
6460 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6461 <= HOST_BITS_PER_WIDE_INT))))
6462 {
6463 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6464 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6465 {
6466 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6467
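/* The value in the CONSTRUCTOR is the full initializer; e.g. a 3-bit
   unsigned field initialized with -1 must read back as 7 (the mask
   below), while a 3-bit signed field initialized with 7 must read back
   as -1 (the shift pair below re-extends the sign bit).  */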
6468 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6469 {
6470 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6471 op0 = expand_and (op0, op1, target);
6472 }
6473 else
6474 {
6475 enum machine_mode imode
6476 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6477 tree count
6478 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6479 0);
6480
6481 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6482 target, 0);
6483 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6484 target, 0);
6485 }
6486 }
6487
6488 return op0;
6489 }
6490 }
6491
6492 {
6493 enum machine_mode mode1;
6494 int bitsize;
6495 int bitpos;
6496 tree offset;
6497 int volatilep = 0;
6498 int alignment;
6499 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6500 &mode1, &unsignedp, &volatilep,
6501 &alignment);
6502
6503 /* If we got back the original object, something is wrong. Perhaps
6504 we are evaluating an expression too early. In any event, don't
6505 infinitely recurse. */
6506 if (tem == exp)
6507 abort ();
6508
6509 /* If TEM's type is a union of variable size, pass TARGET to the inner
6510 computation, since it will need a temporary and TARGET is known
6511 to suffice. This occurs in unchecked conversion in Ada. */
6512
6513 op0 = expand_expr (tem,
6514 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6515 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6516 != INTEGER_CST)
6517 ? target : NULL_RTX),
6518 VOIDmode,
6519 modifier == EXPAND_INITIALIZER
6520 ? modifier : EXPAND_NORMAL);
6521
6522 /* If this is a constant, put it into a register if it is a
6523 legitimate constant and memory if it isn't. */
6524 if (CONSTANT_P (op0))
6525 {
6526 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6527 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6528 op0 = force_reg (mode, op0);
6529 else
6530 op0 = validize_mem (force_const_mem (mode, op0));
6531 }
6532
6533 if (offset != 0)
6534 {
6535 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6536
6537 if (GET_CODE (op0) != MEM)
6538 abort ();
6539
6540 if (GET_MODE (offset_rtx) != ptr_mode)
6541 {
6542 #ifdef POINTERS_EXTEND_UNSIGNED
6543 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6544 #else
6545 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6546 #endif
6547 }
6548
6549 /* A constant address in OP0 can have VOIDmode; we must not try
6550 to call force_reg in that case, so avoid it. */
6551 if (GET_CODE (op0) == MEM
6552 && GET_MODE (op0) == BLKmode
6553 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6554 && bitsize
6555 && (bitpos % bitsize) == 0
6556 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6557 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6558 {
6559 rtx temp = change_address (op0, mode1,
6560 plus_constant (XEXP (op0, 0),
6561 (bitpos /
6562 BITS_PER_UNIT)));
6563 if (GET_CODE (XEXP (temp, 0)) == REG)
6564 op0 = temp;
6565 else
6566 op0 = change_address (op0, mode1,
6567 force_reg (GET_MODE (XEXP (temp, 0)),
6568 XEXP (temp, 0)));
6569 bitpos = 0;
6570 }
6571
6572
6573 op0 = change_address (op0, VOIDmode,
6574 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6575 force_reg (ptr_mode,
6576 offset_rtx)));
6577 }
6578
6579 /* Don't forget about volatility even if this is a bitfield. */
6580 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6581 {
6582 op0 = copy_rtx (op0);
6583 MEM_VOLATILE_P (op0) = 1;
6584 }
6585
6586 /* Check the access. */
6587 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6588 {
6589 enum memory_use_mode memory_usage;
6590 memory_usage = get_memory_usage_from_modifier (modifier);
6591
6592 if (memory_usage != MEMORY_USE_DONT)
6593 {
6594 rtx to;
6595 int size;
6596
6597 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6598 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6599
6600 /* Check the access right of the pointer. */
6601 if (size > BITS_PER_UNIT)
6602 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6603 to, Pmode,
6604 GEN_INT (size / BITS_PER_UNIT),
6605 TYPE_MODE (sizetype),
6606 GEN_INT (memory_usage),
6607 TYPE_MODE (integer_type_node));
6608 }
6609 }
6610
6611 /* In cases where an aligned union has an unaligned object
6612 as a field, we might be extracting a BLKmode value from
6613 an integer-mode (e.g., SImode) object. Handle this case
6614 by doing the extract into an object as wide as the field
6615 (which we know to be the width of a basic mode), then
6616 storing into memory, and changing the mode to BLKmode.
6617 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6618 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6619 if (mode1 == VOIDmode
6620 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6621 || (modifier != EXPAND_CONST_ADDRESS
6622 && modifier != EXPAND_INITIALIZER
6623 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6624 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6625 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6626 /* If the field isn't aligned enough to fetch as a memref,
6627 fetch it as a bit field. */
6628 || (SLOW_UNALIGNED_ACCESS
6629 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6630 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6631 {
6632 enum machine_mode ext_mode = mode;
6633
6634 if (ext_mode == BLKmode)
6635 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6636
6637 if (ext_mode == BLKmode)
6638 {
6639 /* In this case, BITPOS must start at a byte boundary and
6640 TARGET, if specified, must be a MEM. */
6641 if (GET_CODE (op0) != MEM
6642 || (target != 0 && GET_CODE (target) != MEM)
6643 || bitpos % BITS_PER_UNIT != 0)
6644 abort ();
6645
6646 op0 = change_address (op0, VOIDmode,
6647 plus_constant (XEXP (op0, 0),
6648 bitpos / BITS_PER_UNIT));
6649 if (target == 0)
6650 target = assign_temp (type, 0, 1, 1);
6651
6652 emit_block_move (target, op0,
6653 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6654 / BITS_PER_UNIT),
6655 1);
6656
6657 return target;
6658 }
6659
6660 op0 = validize_mem (op0);
6661
6662 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6663 mark_reg_pointer (XEXP (op0, 0), alignment);
6664
6665 op0 = extract_bit_field (op0, bitsize, bitpos,
6666 unsignedp, target, ext_mode, ext_mode,
6667 alignment,
6668 int_size_in_bytes (TREE_TYPE (tem)));
6669
6670 /* If the result is a record type and BITSIZE is narrower than
6671 the mode of OP0, an integral mode, and this is a big endian
6672 machine, we must put the field into the high-order bits. */
6673 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6674 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6675 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6676 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6677 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6678 - bitsize),
6679 op0, 1);
6680
6681 if (mode == BLKmode)
6682 {
6683 rtx new = assign_stack_temp (ext_mode,
6684 bitsize / BITS_PER_UNIT, 0);
6685
6686 emit_move_insn (new, op0);
6687 op0 = copy_rtx (new);
6688 PUT_MODE (op0, BLKmode);
6689 MEM_SET_IN_STRUCT_P (op0, 1);
6690 }
6691
6692 return op0;
6693 }
6694
6695 /* If the result is BLKmode, use that to access the object
6696 now as well. */
6697 if (mode == BLKmode)
6698 mode1 = BLKmode;
6699
6700 /* Get a reference to just this component. */
6701 if (modifier == EXPAND_CONST_ADDRESS
6702 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6703 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6704 (bitpos / BITS_PER_UNIT)));
6705 else
6706 op0 = change_address (op0, mode1,
6707 plus_constant (XEXP (op0, 0),
6708 (bitpos / BITS_PER_UNIT)));
6709
6710 if (GET_CODE (op0) == MEM)
6711 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6712
6713 if (GET_CODE (XEXP (op0, 0)) == REG)
6714 mark_reg_pointer (XEXP (op0, 0), alignment);
6715
6716 MEM_SET_IN_STRUCT_P (op0, 1);
6717 MEM_VOLATILE_P (op0) |= volatilep;
6718 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6719 || modifier == EXPAND_CONST_ADDRESS
6720 || modifier == EXPAND_INITIALIZER)
6721 return op0;
6722 else if (target == 0)
6723 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6724
6725 convert_move (target, op0, unsignedp);
6726 return target;
6727 }
6728
6729 /* Intended for a reference to a buffer of a file-object in Pascal.
6730 But it's not certain that a special tree code will really be
6731 necessary for these. INDIRECT_REF might work for them. */
6732 case BUFFER_REF:
6733 abort ();
6734
6735 case IN_EXPR:
6736 {
6737 /* Pascal set IN expression.
6738
6739 Algorithm:
6740 rlo = set_low - (set_low%bits_per_word);
6741 the_word = set [ (index - rlo)/bits_per_word ];
6742 bit_index = index % bits_per_word;
6743 bitmask = 1 << bit_index;
6744 return !!(the_word & bitmask); */
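/* For instance, with bits_per_word == 8, set_low == 0 and index == 13,
   the pseudo-code above gives rlo = 0, the_word = set[1], bit_index = 5
   and bitmask = 0x20, i.e. we test bit 5 of the second byte of the set.  */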
6745
6746 tree set = TREE_OPERAND (exp, 0);
6747 tree index = TREE_OPERAND (exp, 1);
6748 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6749 tree set_type = TREE_TYPE (set);
6750 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6751 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6752 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6753 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6754 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6755 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6756 rtx setaddr = XEXP (setval, 0);
6757 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6758 rtx rlow;
6759 rtx diff, quo, rem, addr, bit, result;
6760
6761 preexpand_calls (exp);
6762
6763 /* If domain is empty, answer is no. Likewise if index is constant
6764 and out of bounds. */
6765 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6766 && TREE_CODE (set_low_bound) == INTEGER_CST
6767 && tree_int_cst_lt (set_high_bound, set_low_bound))
6768 || (TREE_CODE (index) == INTEGER_CST
6769 && TREE_CODE (set_low_bound) == INTEGER_CST
6770 && tree_int_cst_lt (index, set_low_bound))
6771 || (TREE_CODE (set_high_bound) == INTEGER_CST
6772 && TREE_CODE (index) == INTEGER_CST
6773 && tree_int_cst_lt (set_high_bound, index))))
6774 return const0_rtx;
6775
6776 if (target == 0)
6777 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6778
6779 /* If we get here, we have to generate the code for both cases
6780 (in range and out of range). */
6781
6782 op0 = gen_label_rtx ();
6783 op1 = gen_label_rtx ();
6784
6785 if (! (GET_CODE (index_val) == CONST_INT
6786 && GET_CODE (lo_r) == CONST_INT))
6787 {
6788 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6789 GET_MODE (index_val), iunsignedp, 0, op1);
6790 }
6791
6792 if (! (GET_CODE (index_val) == CONST_INT
6793 && GET_CODE (hi_r) == CONST_INT))
6794 {
6795 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6796 GET_MODE (index_val), iunsignedp, 0, op1);
6797 }
6798
6799 /* Calculate the element number of bit zero in the first word
6800 of the set. */
6801 if (GET_CODE (lo_r) == CONST_INT)
6802 rlow = GEN_INT (INTVAL (lo_r)
6803 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6804 else
6805 rlow = expand_binop (index_mode, and_optab, lo_r,
6806 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6807 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6808
6809 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6810 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6811
6812 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6813 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6814 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6815 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6816
6817 addr = memory_address (byte_mode,
6818 expand_binop (index_mode, add_optab, diff,
6819 setaddr, NULL_RTX, iunsignedp,
6820 OPTAB_LIB_WIDEN));
6821
6822 /* Extract the bit we want to examine */
6823 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6824 gen_rtx_MEM (byte_mode, addr),
6825 make_tree (TREE_TYPE (index), rem),
6826 NULL_RTX, 1);
6827 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6828 GET_MODE (target) == byte_mode ? target : 0,
6829 1, OPTAB_LIB_WIDEN);
6830
6831 if (result != target)
6832 convert_move (target, result, 1);
6833
6834 /* Output the code to handle the out-of-range case. */
6835 emit_jump (op0);
6836 emit_label (op1);
6837 emit_move_insn (target, const0_rtx);
6838 emit_label (op0);
6839 return target;
6840 }
6841
6842 case WITH_CLEANUP_EXPR:
6843 if (RTL_EXPR_RTL (exp) == 0)
6844 {
6845 RTL_EXPR_RTL (exp)
6846 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6847 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6848
6849 /* That's it for this cleanup. */
6850 TREE_OPERAND (exp, 2) = 0;
6851 }
6852 return RTL_EXPR_RTL (exp);
6853
6854 case CLEANUP_POINT_EXPR:
6855 {
6856 /* Start a new binding layer that will keep track of all cleanup
6857 actions to be performed. */
6858 expand_start_bindings (2);
6859
6860 target_temp_slot_level = temp_slot_level;
6861
6862 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6863 /* If we're going to use this value, load it up now. */
6864 if (! ignore)
6865 op0 = force_not_mem (op0);
6866 preserve_temp_slots (op0);
6867 expand_end_bindings (NULL_TREE, 0, 0);
6868 }
6869 return op0;
6870
6871 case CALL_EXPR:
6872 /* Check for a built-in function. */
6873 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6874 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6875 == FUNCTION_DECL)
6876 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6877 return expand_builtin (exp, target, subtarget, tmode, ignore);
6878
6879 /* If this call was expanded already by preexpand_calls,
6880 just return the result we got. */
6881 if (CALL_EXPR_RTL (exp) != 0)
6882 return CALL_EXPR_RTL (exp);
6883
6884 return expand_call (exp, target, ignore);
6885
6886 case NON_LVALUE_EXPR:
6887 case NOP_EXPR:
6888 case CONVERT_EXPR:
6889 case REFERENCE_EXPR:
6890 if (TREE_CODE (type) == UNION_TYPE)
6891 {
6892 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6893 if (target == 0)
6894 {
6895 if (mode != BLKmode)
6896 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6897 else
6898 target = assign_temp (type, 0, 1, 1);
6899 }
6900
6901 if (GET_CODE (target) == MEM)
6902 /* Store data into beginning of memory target. */
6903 store_expr (TREE_OPERAND (exp, 0),
6904 change_address (target, TYPE_MODE (valtype), 0), 0);
6905
6906 else if (GET_CODE (target) == REG)
6907 /* Store this field into a union of the proper type. */
6908 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6909 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6910 VOIDmode, 0, 1,
6911 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6912 0);
6913 else
6914 abort ();
6915
6916 /* Return the entire union. */
6917 return target;
6918 }
6919
6920 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6921 {
6922 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6923 ro_modifier);
6924
6925 /* If the signedness of the conversion differs and OP0 is
6926 a promoted SUBREG, clear that indication since we now
6927 have to do the proper extension. */
6928 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6929 && GET_CODE (op0) == SUBREG)
6930 SUBREG_PROMOTED_VAR_P (op0) = 0;
6931
6932 return op0;
6933 }
6934
6935 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6936 if (GET_MODE (op0) == mode)
6937 return op0;
6938
6939 /* If OP0 is a constant, just convert it into the proper mode. */
6940 if (CONSTANT_P (op0))
6941 return
6942 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6943 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6944
6945 if (modifier == EXPAND_INITIALIZER)
6946 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6947
6948 if (target == 0)
6949 return
6950 convert_to_mode (mode, op0,
6951 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6952 else
6953 convert_move (target, op0,
6954 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6955 return target;
6956
6957 case PLUS_EXPR:
6958 /* We come here from MINUS_EXPR when the second operand is a
6959 constant. */
6960 plus_expr:
6961 this_optab = add_optab;
6962
6963 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6964 something else, make sure we add the register to the constant and
6965 then to the other thing. This case can occur during strength
6966 reduction and doing it this way will produce better code if the
6967 frame pointer or argument pointer is eliminated.
6968
6969 fold-const.c will ensure that the constant is always in the inner
6970 PLUS_EXPR, so the only case we need to do anything about is if
6971 sp, ap, or fp is our second argument, in which case we must swap
6972 the innermost first argument and our second argument. */
6973
6974 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6975 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6976 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6977 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6978 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6979 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6980 {
6981 tree t = TREE_OPERAND (exp, 1);
6982
6983 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6984 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6985 }
6986
6987 /* If the result is to be ptr_mode and we are adding an integer to
6988 something, we might be forming a constant. So try to use
6989 plus_constant. If it produces a sum and we can't accept it,
6990 use force_operand. This allows P = &ARR[const] to generate
6991 efficient code on machines where a SYMBOL_REF is not a valid
6992 address.
6993
6994 If this is an EXPAND_SUM call, always return the sum. */
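/* An illustrative sketch (not from the original comment, and assuming
   4-byte ints): for P = &ARR[3], the address of ARR is expanded with
   EXPAND_SUM and plus_constant folds the 12-byte offset into a single
   (const (plus (symbol_ref ARR) (const_int 12))) operand, so no
   run-time add is needed.  */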
6995 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6996 || mode == ptr_mode)
6997 {
6998 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6999 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7000 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7001 {
7002 rtx constant_part;
7003
7004 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7005 EXPAND_SUM);
7006 /* Use immed_double_const to ensure that the constant is
7007 truncated according to the mode of OP1, then sign extended
7008 to a HOST_WIDE_INT. Using the constant directly can result
7009 in non-canonical RTL in a 64x32 cross compile. */
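/* For illustration (a rough sketch of the 64x32 case): on a 64-bit
   host targeting a 32-bit machine, an SImode constant such as
   0xfffffff0 must become the sign-extended CONST_INT -16; using the
   raw low word directly would yield a non-canonical constant.  */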
7010 constant_part
7011 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7012 (HOST_WIDE_INT) 0,
7013 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7014 op1 = plus_constant (op1, INTVAL (constant_part));
7015 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7016 op1 = force_operand (op1, target);
7017 return op1;
7018 }
7019
7020 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7021 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7022 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7023 {
7024 rtx constant_part;
7025
7026 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7027 EXPAND_SUM);
7028 if (! CONSTANT_P (op0))
7029 {
7030 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7031 VOIDmode, modifier);
7032 /* Don't go to both_summands if modifier
7033 says it's not right to return a PLUS. */
7034 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7035 goto binop2;
7036 goto both_summands;
7037 }
7038 /* Use immed_double_const to ensure that the constant is
7039 truncated according to the mode of OP0, then sign extended
7040 to a HOST_WIDE_INT. Using the constant directly can result
7041 in non-canonical RTL in a 64x32 cross compile. */
7042 constant_part
7043 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7044 (HOST_WIDE_INT) 0,
7045 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7046 op0 = plus_constant (op0, INTVAL (constant_part));
7047 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7048 op0 = force_operand (op0, target);
7049 return op0;
7050 }
7051 }
7052
7053 /* No sense saving up arithmetic to be done
7054 if it's all in the wrong mode to form part of an address.
7055 And force_operand won't know whether to sign-extend or
7056 zero-extend. */
7057 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7058 || mode != ptr_mode)
7059 goto binop;
7060
7061 preexpand_calls (exp);
7062 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7063 subtarget = 0;
7064
7065 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7066 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7067
7068 both_summands:
7069 /* Make sure any term that's a sum with a constant comes last. */
7070 if (GET_CODE (op0) == PLUS
7071 && CONSTANT_P (XEXP (op0, 1)))
7072 {
7073 temp = op0;
7074 op0 = op1;
7075 op1 = temp;
7076 }
7077 /* If adding to a sum including a constant,
7078 associate it to put the constant outside. */
7079 if (GET_CODE (op1) == PLUS
7080 && CONSTANT_P (XEXP (op1, 1)))
7081 {
7082 rtx constant_term = const0_rtx;
7083
7084 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7085 if (temp != 0)
7086 op0 = temp;
7087 /* Ensure that MULT comes first if there is one. */
7088 else if (GET_CODE (op0) == MULT)
7089 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7090 else
7091 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7092
7093 /* Let's also eliminate constants from op0 if possible. */
7094 op0 = eliminate_constant_term (op0, &constant_term);
7095
7096 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7097 their sum should be a constant. Form it into OP1, since the
7098 result we want will then be OP0 + OP1. */
7099
7100 temp = simplify_binary_operation (PLUS, mode, constant_term,
7101 XEXP (op1, 1));
7102 if (temp != 0)
7103 op1 = temp;
7104 else
7105 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7106 }
7107
7108 /* Put a constant term last and put a multiplication first. */
7109 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7110 temp = op1, op1 = op0, op0 = temp;
7111
7112 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7113 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7114
7115 case MINUS_EXPR:
7116 /* For initializers, we are allowed to return a MINUS of two
7117 symbolic constants. Here we handle all cases when both operands
7118 are constant. */
7119 /* Handle difference of two symbolic constants,
7120 for the sake of an initializer. */
7121 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7122 && really_constant_p (TREE_OPERAND (exp, 0))
7123 && really_constant_p (TREE_OPERAND (exp, 1)))
7124 {
7125 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7126 VOIDmode, ro_modifier);
7127 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7128 VOIDmode, ro_modifier);
7129
7130 /* If the last operand is a CONST_INT, use plus_constant of
7131 the negated constant. Else make the MINUS. */
7132 if (GET_CODE (op1) == CONST_INT)
7133 return plus_constant (op0, - INTVAL (op1));
7134 else
7135 return gen_rtx_MINUS (mode, op0, op1);
7136 }
7137 /* Convert A - const to A + (-const). */
7138 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7139 {
7140 tree negated = fold (build1 (NEGATE_EXPR, type,
7141 TREE_OPERAND (exp, 1)));
7142
7143 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7144 /* If we can't negate the constant in TYPE, leave it alone and
7145 expand_binop will negate it for us. We used to try to do it
7146 here in the signed version of TYPE, but that doesn't work
7147 on POINTER_TYPEs. */;
7148 else
7149 {
7150 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7151 goto plus_expr;
7152 }
7153 }
7154 this_optab = sub_optab;
7155 goto binop;
7156
7157 case MULT_EXPR:
7158 preexpand_calls (exp);
7159 /* If first operand is constant, swap them.
7160 Thus the following special case checks need only
7161 check the second operand. */
7162 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7163 {
7164 register tree t1 = TREE_OPERAND (exp, 0);
7165 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7166 TREE_OPERAND (exp, 1) = t1;
7167 }
7168
7169 /* Attempt to return something suitable for generating an
7170 indexed address, for machines that support that. */
7171
7172 if (modifier == EXPAND_SUM && mode == ptr_mode
7173 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7174 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7175 {
7176 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7177 EXPAND_SUM);
7178
7179 /* Apply distributive law if OP0 is x+c. */
7180 if (GET_CODE (op0) == PLUS
7181 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7182 return
7183 gen_rtx_PLUS
7184 (mode,
7185 gen_rtx_MULT
7186 (mode, XEXP (op0, 0),
7187 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7188 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7189 * INTVAL (XEXP (op0, 1))));
7190
7191 if (GET_CODE (op0) != REG)
7192 op0 = force_operand (op0, NULL_RTX);
7193 if (GET_CODE (op0) != REG)
7194 op0 = copy_to_mode_reg (mode, op0);
7195
7196 return
7197 gen_rtx_MULT (mode, op0,
7198 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7199 }
7200
7201 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7202 subtarget = 0;
7203
7204 /* Check for multiplying things that have been extended
7205 from a narrower type. If this machine supports multiplying
7206 in that narrower type with a result in the desired type,
7207 do it that way, and avoid the explicit type-conversion. */
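/* Illustrative example (assuming the target provides a widening
   multiply pattern such as mulhisi3): for (int) h1 * (int) h2 with
   HImode operands, smul_widen_optab can multiply the narrow values
   directly instead of extending both to SImode first.  */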
7208 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7209 && TREE_CODE (type) == INTEGER_TYPE
7210 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7211 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7212 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7213 && int_fits_type_p (TREE_OPERAND (exp, 1),
7214 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7215 /* Don't use a widening multiply if a shift will do. */
7216 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7217 > HOST_BITS_PER_WIDE_INT)
7218 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7219 ||
7220 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7221 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7222 ==
7223 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7224 /* If both operands are extended, they must either both
7225 be zero-extended or both be sign-extended. */
7226 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7227 ==
7228 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7229 {
7230 enum machine_mode innermode
7231 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7232 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7233 ? smul_widen_optab : umul_widen_optab);
7234 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7235 ? umul_widen_optab : smul_widen_optab);
7236 if (mode == GET_MODE_WIDER_MODE (innermode))
7237 {
7238 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7239 {
7240 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7241 NULL_RTX, VOIDmode, 0);
7242 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7243 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7244 VOIDmode, 0);
7245 else
7246 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7247 NULL_RTX, VOIDmode, 0);
7248 goto binop2;
7249 }
7250 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7251 && innermode == word_mode)
7252 {
7253 rtx htem;
7254 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7255 NULL_RTX, VOIDmode, 0);
7256 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7257 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7258 VOIDmode, 0);
7259 else
7260 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7261 NULL_RTX, VOIDmode, 0);
7262 temp = expand_binop (mode, other_optab, op0, op1, target,
7263 unsignedp, OPTAB_LIB_WIDEN);
7264 htem = expand_mult_highpart_adjust (innermode,
7265 gen_highpart (innermode, temp),
7266 op0, op1,
7267 gen_highpart (innermode, temp),
7268 unsignedp);
7269 emit_move_insn (gen_highpart (innermode, temp), htem);
7270 return temp;
7271 }
7272 }
7273 }
7274 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7275 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7276 return expand_mult (mode, op0, op1, target, unsignedp);
7277
7278 case TRUNC_DIV_EXPR:
7279 case FLOOR_DIV_EXPR:
7280 case CEIL_DIV_EXPR:
7281 case ROUND_DIV_EXPR:
7282 case EXACT_DIV_EXPR:
7283 preexpand_calls (exp);
7284 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7285 subtarget = 0;
7286 /* Possible optimization: compute the dividend with EXPAND_SUM;
7287 then, if the divisor is constant, we can optimize the case
7288 where some terms of the dividend have coefficients divisible by it. */
7289 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7290 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7291 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7292
7293 case RDIV_EXPR:
7294 this_optab = flodiv_optab;
7295 goto binop;
7296
7297 case TRUNC_MOD_EXPR:
7298 case FLOOR_MOD_EXPR:
7299 case CEIL_MOD_EXPR:
7300 case ROUND_MOD_EXPR:
7301 preexpand_calls (exp);
7302 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7303 subtarget = 0;
7304 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7305 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7306 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7307
7308 case FIX_ROUND_EXPR:
7309 case FIX_FLOOR_EXPR:
7310 case FIX_CEIL_EXPR:
7311 abort (); /* Not used for C. */
7312
7313 case FIX_TRUNC_EXPR:
7314 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7315 if (target == 0)
7316 target = gen_reg_rtx (mode);
7317 expand_fix (target, op0, unsignedp);
7318 return target;
7319
7320 case FLOAT_EXPR:
7321 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7322 if (target == 0)
7323 target = gen_reg_rtx (mode);
7324 /* expand_float can't figure out what to do if FROM has VOIDmode.
7325 So give it the correct mode. With -O, cse will optimize this. */
7326 if (GET_MODE (op0) == VOIDmode)
7327 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7328 op0);
7329 expand_float (target, op0,
7330 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7331 return target;
7332
7333 case NEGATE_EXPR:
7334 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7335 temp = expand_unop (mode, neg_optab, op0, target, 0);
7336 if (temp == 0)
7337 abort ();
7338 return temp;
7339
7340 case ABS_EXPR:
7341 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7342
7343 /* Handle complex values specially. */
7344 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7345 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7346 return expand_complex_abs (mode, op0, target, unsignedp);
7347
7348 /* Unsigned abs is simply the operand. Testing here means we don't
7349 risk generating incorrect code below. */
7350 if (TREE_UNSIGNED (type))
7351 return op0;
7352
7353 return expand_abs (mode, op0, target,
7354 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7355
7356 case MAX_EXPR:
7357 case MIN_EXPR:
7358 target = original_target;
7359 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7360 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7361 || GET_MODE (target) != mode
7362 || (GET_CODE (target) == REG
7363 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7364 target = gen_reg_rtx (mode);
7365 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7366 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7367
7368 /* First try to do it with a special MIN or MAX instruction.
7369 If that does not win, use a conditional jump to select the proper
7370 value. */
7371 this_optab = (TREE_UNSIGNED (type)
7372 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7373 : (code == MIN_EXPR ? smin_optab : smax_optab));
7374
7375 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7376 OPTAB_WIDEN);
7377 if (temp != 0)
7378 return temp;
7379
7380 /* At this point, a MEM target is no longer useful; we will get better
7381 code without it. */
7382
7383 if (GET_CODE (target) == MEM)
7384 target = gen_reg_rtx (mode);
7385
7386 if (target != op0)
7387 emit_move_insn (target, op0);
7388
7389 op0 = gen_label_rtx ();
7390
7391 /* If this mode is an integer too wide to compare properly,
7392 compare word by word. Rely on cse to optimize constant cases. */
7393 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode, ccp_jump))
7394 {
7395 if (code == MAX_EXPR)
7396 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7397 target, op1, NULL_RTX, op0);
7398 else
7399 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7400 op1, target, NULL_RTX, op0);
7401 }
7402 else
7403 {
7404 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7405 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7406 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7407 op0);
7408 }
7409 emit_move_insn (target, op1);
7410 emit_label (op0);
7411 return target;
7412
7413 case BIT_NOT_EXPR:
7414 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7415 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7416 if (temp == 0)
7417 abort ();
7418 return temp;
7419
7420 case FFS_EXPR:
7421 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7422 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7423 if (temp == 0)
7424 abort ();
7425 return temp;
7426
7427 /* ??? Can optimize bitwise operations with one arg constant.
7428 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7429 and (a bitwise1 b) bitwise2 b (etc)
7430 but that is probably not worth while. */
7431
7432 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7433 boolean values when we want in all cases to compute both of them. In
7434 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7435 as actual zero-or-1 values and then bitwise anding. In cases where
7436 there cannot be any side effects, better code would be made by
7437 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7438 how to recognize those cases. */
7439
7440 case TRUTH_AND_EXPR:
7441 case BIT_AND_EXPR:
7442 this_optab = and_optab;
7443 goto binop;
7444
7445 case TRUTH_OR_EXPR:
7446 case BIT_IOR_EXPR:
7447 this_optab = ior_optab;
7448 goto binop;
7449
7450 case TRUTH_XOR_EXPR:
7451 case BIT_XOR_EXPR:
7452 this_optab = xor_optab;
7453 goto binop;
7454
7455 case LSHIFT_EXPR:
7456 case RSHIFT_EXPR:
7457 case LROTATE_EXPR:
7458 case RROTATE_EXPR:
7459 preexpand_calls (exp);
7460 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7461 subtarget = 0;
7462 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7463 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7464 unsignedp);
7465
7466 /* Could determine the answer when only additive constants differ. Also,
7467 the addition of one can be handled by changing the condition. */
7468 case LT_EXPR:
7469 case LE_EXPR:
7470 case GT_EXPR:
7471 case GE_EXPR:
7472 case EQ_EXPR:
7473 case NE_EXPR:
7474 preexpand_calls (exp);
7475 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7476 if (temp != 0)
7477 return temp;
7478
7479 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7480 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7481 && original_target
7482 && GET_CODE (original_target) == REG
7483 && (GET_MODE (original_target)
7484 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7485 {
7486 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7487 VOIDmode, 0);
7488
7489 if (temp != original_target)
7490 temp = copy_to_reg (temp);
7491
7492 op1 = gen_label_rtx ();
7493 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7494 GET_MODE (temp), unsignedp, 0, op1);
7495 emit_move_insn (temp, const1_rtx);
7496 emit_label (op1);
7497 return temp;
7498 }
7499
7500 /* If no set-flag instruction, must generate a conditional
7501 store into a temporary variable. Drop through
7502 and handle this like && and ||. */
7503
7504 case TRUTH_ANDIF_EXPR:
7505 case TRUTH_ORIF_EXPR:
7506 if (! ignore
7507 && (target == 0 || ! safe_from_p (target, exp, 1)
7508 /* Make sure we don't have a hard reg (such as function's return
7509 value) live across basic blocks, if not optimizing. */
7510 || (!optimize && GET_CODE (target) == REG
7511 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7512 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7513
7514 if (target)
7515 emit_clr_insn (target);
7516
7517 op1 = gen_label_rtx ();
7518 jumpifnot (exp, op1);
7519
7520 if (target)
7521 emit_0_to_1_insn (target);
7522
7523 emit_label (op1);
7524 return ignore ? const0_rtx : target;
7525
7526 case TRUTH_NOT_EXPR:
7527 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7528 /* The parser is careful to generate TRUTH_NOT_EXPR
7529 only with operands that are always zero or one. */
7530 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7531 target, 1, OPTAB_LIB_WIDEN);
7532 if (temp == 0)
7533 abort ();
7534 return temp;
7535
7536 case COMPOUND_EXPR:
7537 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7538 emit_queue ();
7539 return expand_expr (TREE_OPERAND (exp, 1),
7540 (ignore ? const0_rtx : target),
7541 VOIDmode, 0);
7542
7543 case COND_EXPR:
7544 /* If we would have a "singleton" (see below) were it not for a
7545 conversion in each arm, bring that conversion back out. */
7546 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7547 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7548 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7549 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7550 {
7551 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7552 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7553
7554 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7555 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7556 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7557 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7558 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7559 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7560 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7561 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7562 return expand_expr (build1 (NOP_EXPR, type,
7563 build (COND_EXPR, TREE_TYPE (true),
7564 TREE_OPERAND (exp, 0),
7565 true, false)),
7566 target, tmode, modifier);
7567 }
7568
7569 {
7570 /* Note that COND_EXPRs whose type is a structure or union
7571 are required to be constructed to contain assignments of
7572 a temporary variable, so that we can evaluate them here
7573 for side effect only. If type is void, we must do likewise. */
7574
7575 /* If an arm of the branch requires a cleanup,
7576 only that cleanup is performed. */
7577
7578 tree singleton = 0;
7579 tree binary_op = 0, unary_op = 0;
7580
7581 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7582 convert it to our mode, if necessary. */
7583 if (integer_onep (TREE_OPERAND (exp, 1))
7584 && integer_zerop (TREE_OPERAND (exp, 2))
7585 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7586 {
7587 if (ignore)
7588 {
7589 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7590 ro_modifier);
7591 return const0_rtx;
7592 }
7593
7594 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7595 if (GET_MODE (op0) == mode)
7596 return op0;
7597
7598 if (target == 0)
7599 target = gen_reg_rtx (mode);
7600 convert_move (target, op0, unsignedp);
7601 return target;
7602 }
7603
7604 /* Check for X ? A + B : A. If we have this, we can copy A to the
7605 output and conditionally add B. Similarly for unary operations.
7606 Don't do this if X has side-effects because those side effects
7607 might affect A or B and the "?" operation is a sequence point in
7608 ANSI. (operand_equal_p tests for side effects.) */
7609
7610 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7611 && operand_equal_p (TREE_OPERAND (exp, 2),
7612 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7613 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7614 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7615 && operand_equal_p (TREE_OPERAND (exp, 1),
7616 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7617 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7618 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7619 && operand_equal_p (TREE_OPERAND (exp, 2),
7620 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7621 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7622 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7623 && operand_equal_p (TREE_OPERAND (exp, 1),
7624 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7625 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7626
7627 /* If we are not to produce a result, we have no target. Otherwise,
7628 if a target was specified use it; it will not be used as an
7629 intermediate target unless it is safe. If no target, use a
7630 temporary. */
7631
7632 if (ignore)
7633 temp = 0;
7634 else if (original_target
7635 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7636 || (singleton && GET_CODE (original_target) == REG
7637 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7638 && original_target == var_rtx (singleton)))
7639 && GET_MODE (original_target) == mode
7640 #ifdef HAVE_conditional_move
7641 && (! can_conditionally_move_p (mode)
7642 || GET_CODE (original_target) == REG
7643 || TREE_ADDRESSABLE (type))
7644 #endif
7645 && ! (GET_CODE (original_target) == MEM
7646 && MEM_VOLATILE_P (original_target)))
7647 temp = original_target;
7648 else if (TREE_ADDRESSABLE (type))
7649 abort ();
7650 else
7651 temp = assign_temp (type, 0, 0, 1);
7652
7653 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7654 do the test of X as a store-flag operation, do this as
7655 A + ((X != 0) << log C). Similarly for other simple binary
7656 operators. Only do for C == 1 if BRANCH_COST is low. */
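/* For example (illustrative): FLAG ? N + 4 : N can be computed as
   N + ((FLAG != 0) << 2), trading the conditional branch for a
   store-flag and a shift.  */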
7657 if (temp && singleton && binary_op
7658 && (TREE_CODE (binary_op) == PLUS_EXPR
7659 || TREE_CODE (binary_op) == MINUS_EXPR
7660 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7661 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7662 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7663 : integer_onep (TREE_OPERAND (binary_op, 1)))
7664 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7665 {
7666 rtx result;
7667 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7668 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7669 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7670 : xor_optab);
7671
7672 /* If we had X ? A : A + 1, do this as A + (X == 0).
7673
7674 We have to invert the truth value here and then put it
7675 back later if do_store_flag fails. We cannot simply copy
7676 TREE_OPERAND (exp, 0) to another variable and modify that
7677 because invert_truthvalue can modify the tree pointed to
7678 by its argument. */
7679 if (singleton == TREE_OPERAND (exp, 1))
7680 TREE_OPERAND (exp, 0)
7681 = invert_truthvalue (TREE_OPERAND (exp, 0));
7682
7683 result = do_store_flag (TREE_OPERAND (exp, 0),
7684 (safe_from_p (temp, singleton, 1)
7685 ? temp : NULL_RTX),
7686 mode, BRANCH_COST <= 1);
7687
7688 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7689 result = expand_shift (LSHIFT_EXPR, mode, result,
7690 build_int_2 (tree_log2
7691 (TREE_OPERAND
7692 (binary_op, 1)),
7693 0),
7694 (safe_from_p (temp, singleton, 1)
7695 ? temp : NULL_RTX), 0);
7696
7697 if (result)
7698 {
7699 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7700 return expand_binop (mode, boptab, op1, result, temp,
7701 unsignedp, OPTAB_LIB_WIDEN);
7702 }
7703 else if (singleton == TREE_OPERAND (exp, 1))
7704 TREE_OPERAND (exp, 0)
7705 = invert_truthvalue (TREE_OPERAND (exp, 0));
7706 }
7707
7708 do_pending_stack_adjust ();
7709 NO_DEFER_POP;
7710 op0 = gen_label_rtx ();
7711
7712 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7713 {
7714 if (temp != 0)
7715 {
7716 /* If the target conflicts with the other operand of the
7717 binary op, we can't use it. Also, we can't use the target
7718 if it is a hard register, because evaluating the condition
7719 might clobber it. */
7720 if ((binary_op
7721 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7722 || (GET_CODE (temp) == REG
7723 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7724 temp = gen_reg_rtx (mode);
7725 store_expr (singleton, temp, 0);
7726 }
7727 else
7728 expand_expr (singleton,
7729 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7730 if (singleton == TREE_OPERAND (exp, 1))
7731 jumpif (TREE_OPERAND (exp, 0), op0);
7732 else
7733 jumpifnot (TREE_OPERAND (exp, 0), op0);
7734
7735 start_cleanup_deferral ();
7736 if (binary_op && temp == 0)
7737 /* Just touch the other operand. */
7738 expand_expr (TREE_OPERAND (binary_op, 1),
7739 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7740 else if (binary_op)
7741 store_expr (build (TREE_CODE (binary_op), type,
7742 make_tree (type, temp),
7743 TREE_OPERAND (binary_op, 1)),
7744 temp, 0);
7745 else
7746 store_expr (build1 (TREE_CODE (unary_op), type,
7747 make_tree (type, temp)),
7748 temp, 0);
7749 op1 = op0;
7750 }
7751 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7752 comparison operator. If we have one of these cases, set the
7753 output to A, branch on A (cse will merge these two references),
7754 then set the output to FOO. */
7755 else if (temp
7756 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7757 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7758 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7759 TREE_OPERAND (exp, 1), 0)
7760 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7761 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7762 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7763 {
7764 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7765 temp = gen_reg_rtx (mode);
7766 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7767 jumpif (TREE_OPERAND (exp, 0), op0);
7768
7769 start_cleanup_deferral ();
7770 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7771 op1 = op0;
7772 }
7773 else if (temp
7774 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7775 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7776 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7777 TREE_OPERAND (exp, 2), 0)
7778 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7779 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7780 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7781 {
7782 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7783 temp = gen_reg_rtx (mode);
7784 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7785 jumpifnot (TREE_OPERAND (exp, 0), op0);
7786
7787 start_cleanup_deferral ();
7788 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7789 op1 = op0;
7790 }
7791 else
7792 {
7793 op1 = gen_label_rtx ();
7794 jumpifnot (TREE_OPERAND (exp, 0), op0);
7795
7796 start_cleanup_deferral ();
7797
7798 /* One branch of the cond can be void if it never returns. For
7799 example, A ? throw : E. */
7800 if (temp != 0
7801 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7802 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7803 else
7804 expand_expr (TREE_OPERAND (exp, 1),
7805 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7806 end_cleanup_deferral ();
7807 emit_queue ();
7808 emit_jump_insn (gen_jump (op1));
7809 emit_barrier ();
7810 emit_label (op0);
7811 start_cleanup_deferral ();
7812 if (temp != 0
7813 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7814 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7815 else
7816 expand_expr (TREE_OPERAND (exp, 2),
7817 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7818 }
7819
7820 end_cleanup_deferral ();
7821
7822 emit_queue ();
7823 emit_label (op1);
7824 OK_DEFER_POP;
7825
7826 return temp;
7827 }
7828
7829 case TARGET_EXPR:
7830 {
7831 /* Something needs to be initialized, but we didn't know
7832 where that thing was when building the tree. For example,
7833 it could be the return value of a function, or a parameter
7834 to a function that is laid out on the stack, or a temporary
7835 variable which must be passed by reference.
7836
7837 We guarantee that the expression will either be constructed
7838 or copied into our original target. */
7839
7840 tree slot = TREE_OPERAND (exp, 0);
7841 tree cleanups = NULL_TREE;
7842 tree exp1;
7843
7844 if (TREE_CODE (slot) != VAR_DECL)
7845 abort ();
7846
7847 if (! ignore)
7848 target = original_target;
7849
7850 /* Set this here so that if we get a target that refers to a
7851 register variable that's already been used, put_reg_into_stack
7852 knows that it should fix up those uses. */
7853 TREE_USED (slot) = 1;
7854
7855 if (target == 0)
7856 {
7857 if (DECL_RTL (slot) != 0)
7858 {
7859 target = DECL_RTL (slot);
7860 /* If we have already expanded the slot, don't do
7861 it again. (mrs) */
7862 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7863 return target;
7864 }
7865 else
7866 {
7867 target = assign_temp (type, 2, 0, 1);
7868 /* All temp slots at this level must not conflict. */
7869 preserve_temp_slots (target);
7870 DECL_RTL (slot) = target;
7871 if (TREE_ADDRESSABLE (slot))
7872 {
7873 TREE_ADDRESSABLE (slot) = 0;
7874 mark_addressable (slot);
7875 }
7876
7877 /* Since SLOT is not known to the called function
7878 to belong to its stack frame, we must build an explicit
7879 cleanup. This case occurs when we must build up a reference
7880 to pass the reference as an argument. In this case,
7881 it is very likely that such a reference need not be
7882 built here. */
7883
7884 if (TREE_OPERAND (exp, 2) == 0)
7885 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7886 cleanups = TREE_OPERAND (exp, 2);
7887 }
7888 }
7889 else
7890 {
7891 /* This case does occur when expanding a parameter which
7892 needs to be constructed on the stack. The target
7893 is the actual stack address that we want to initialize.
7894 The function we call will perform the cleanup in this case. */
7895
7896 /* If we have already assigned it space, use that space,
7897 not the target that we were passed in, as our target
7898 parameter is only a hint. */
7899 if (DECL_RTL (slot) != 0)
7900 {
7901 target = DECL_RTL (slot);
7902 /* If we have already expanded the slot, don't do
7903 it again. (mrs) */
7904 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7905 return target;
7906 }
7907 else
7908 {
7909 DECL_RTL (slot) = target;
7910 /* If we must have an addressable slot, then make sure that
7911 the RTL that we just stored in slot is OK. */
7912 if (TREE_ADDRESSABLE (slot))
7913 {
7914 TREE_ADDRESSABLE (slot) = 0;
7915 mark_addressable (slot);
7916 }
7917 }
7918 }
7919
7920 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7921 /* Mark it as expanded. */
7922 TREE_OPERAND (exp, 1) = NULL_TREE;
7923
7924 store_expr (exp1, target, 0);
7925
7926 expand_decl_cleanup (NULL_TREE, cleanups);
7927
7928 return target;
7929 }
7930
7931 case INIT_EXPR:
7932 {
7933 tree lhs = TREE_OPERAND (exp, 0);
7934 tree rhs = TREE_OPERAND (exp, 1);
7935 tree noncopied_parts = 0;
7936 tree lhs_type = TREE_TYPE (lhs);
7937
7938 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7939 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7940 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7941 TYPE_NONCOPIED_PARTS (lhs_type));
7942 while (noncopied_parts != 0)
7943 {
7944 expand_assignment (TREE_VALUE (noncopied_parts),
7945 TREE_PURPOSE (noncopied_parts), 0, 0);
7946 noncopied_parts = TREE_CHAIN (noncopied_parts);
7947 }
7948 return temp;
7949 }
7950
7951 case MODIFY_EXPR:
7952 {
7953 /* If lhs is complex, expand calls in rhs before computing it.
7954 That's so we don't compute a pointer and save it over a call.
7955 If lhs is simple, compute it first so we can give it as a
7956 target if the rhs is just a call. This avoids an extra temp and copy
7957 and that prevents a partial-subsumption which makes bad code.
7958 Actually we could treat component_ref's of vars like vars. */
7959
7960 tree lhs = TREE_OPERAND (exp, 0);
7961 tree rhs = TREE_OPERAND (exp, 1);
7962 tree noncopied_parts = 0;
7963 tree lhs_type = TREE_TYPE (lhs);
7964
7965 temp = 0;
7966
7967 if (TREE_CODE (lhs) != VAR_DECL
7968 && TREE_CODE (lhs) != RESULT_DECL
7969 && TREE_CODE (lhs) != PARM_DECL
7970 && ! (TREE_CODE (lhs) == INDIRECT_REF
7971 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7972 preexpand_calls (exp);
7973
7974 /* Check for |= or &= of a bitfield of size one into another bitfield
7975 of size 1. In this case, (unless we need the result of the
7976 assignment) we can do this more efficiently with a
7977 test followed by an assignment, if necessary.
7978
7979 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7980 things change so we do, this code should be enhanced to
7981 support it. */
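/* A rough sketch of the transformation: for `s.a |= s.b;' with two
   one-bit fields and the result unused, we emit the equivalent of
   `if (s.b) s.a = 1;', jumping around a constant store instead of
   doing a read-modify-write of the destination bitfield.  */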
7982 if (ignore
7983 && TREE_CODE (lhs) == COMPONENT_REF
7984 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7985 || TREE_CODE (rhs) == BIT_AND_EXPR)
7986 && TREE_OPERAND (rhs, 0) == lhs
7987 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7988 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7989 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7990 {
7991 rtx label = gen_label_rtx ();
7992
7993 do_jump (TREE_OPERAND (rhs, 1),
7994 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7995 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7996 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7997 (TREE_CODE (rhs) == BIT_IOR_EXPR
7998 ? integer_one_node
7999 : integer_zero_node)),
8000 0, 0);
8001 do_pending_stack_adjust ();
8002 emit_label (label);
8003 return const0_rtx;
8004 }
8005
8006 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8007 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8008 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8009 TYPE_NONCOPIED_PARTS (lhs_type));
8010
8011 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8012 while (noncopied_parts != 0)
8013 {
8014 expand_assignment (TREE_PURPOSE (noncopied_parts),
8015 TREE_VALUE (noncopied_parts), 0, 0);
8016 noncopied_parts = TREE_CHAIN (noncopied_parts);
8017 }
8018 return temp;
8019 }
8020
8021 case RETURN_EXPR:
8022 if (!TREE_OPERAND (exp, 0))
8023 expand_null_return ();
8024 else
8025 expand_return (TREE_OPERAND (exp, 0));
8026 return const0_rtx;
8027
8028 case PREINCREMENT_EXPR:
8029 case PREDECREMENT_EXPR:
8030 return expand_increment (exp, 0, ignore);
8031
8032 case POSTINCREMENT_EXPR:
8033 case POSTDECREMENT_EXPR:
8034 /* Faster to treat as pre-increment if result is not used. */
8035 return expand_increment (exp, ! ignore, ignore);
8036
8037 case ADDR_EXPR:
8038 /* If nonzero, TEMP will be set to the address of something that might
8039 be a MEM corresponding to a stack slot. */
8040 temp = 0;
8041
8042 /* Are we taking the address of a nested function? */
8043 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8044 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8045 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8046 && ! TREE_STATIC (exp))
8047 {
8048 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8049 op0 = force_operand (op0, target);
8050 }
8051 /* If we are taking the address of something erroneous, just
8052 return a zero. */
8053 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8054 return const0_rtx;
8055 else
8056 {
8057 /* We make sure to pass const0_rtx down if we came in with
8058 ignore set, to avoid doing the cleanups twice for something. */
8059 op0 = expand_expr (TREE_OPERAND (exp, 0),
8060 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8061 (modifier == EXPAND_INITIALIZER
8062 ? modifier : EXPAND_CONST_ADDRESS));
8063
8064 /* If we are going to ignore the result, OP0 will have been set
8065 to const0_rtx, so just return it. Don't get confused and
8066 think we are taking the address of the constant. */
8067 if (ignore)
8068 return op0;
8069
8070 op0 = protect_from_queue (op0, 0);
8071
8072 /* We would like the object in memory. If it is a constant, we can
8073 have it be statically allocated into memory. For a non-constant,
8074 we need to allocate some memory and store the value into it. */
8075
8076 if (CONSTANT_P (op0))
8077 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8078 op0);
8079 else if (GET_CODE (op0) == MEM)
8080 {
8081 mark_temp_addr_taken (op0);
8082 temp = XEXP (op0, 0);
8083 }
8084
8085 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8086 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8087 {
8088 /* If this object is in a register, it must not
8089 be BLKmode. */
8090 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8091 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8092
8093 mark_temp_addr_taken (memloc);
8094 emit_move_insn (memloc, op0);
8095 op0 = memloc;
8096 }
8097
8098 if (GET_CODE (op0) != MEM)
8099 abort ();
8100
8101 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8102 {
8103 temp = XEXP (op0, 0);
8104 #ifdef POINTERS_EXTEND_UNSIGNED
8105 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8106 && mode == ptr_mode)
8107 temp = convert_memory_address (ptr_mode, temp);
8108 #endif
8109 return temp;
8110 }
8111
8112 op0 = force_operand (XEXP (op0, 0), target);
8113 }
8114
8115 if (flag_force_addr && GET_CODE (op0) != REG)
8116 op0 = force_reg (Pmode, op0);
8117
8118 if (GET_CODE (op0) == REG
8119 && ! REG_USERVAR_P (op0))
8120 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8121
8122 /* If we might have had a temp slot, add an equivalent address
8123 for it. */
8124 if (temp != 0)
8125 update_temp_slot_address (temp, op0);
8126
8127 #ifdef POINTERS_EXTEND_UNSIGNED
8128 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8129 && mode == ptr_mode)
8130 op0 = convert_memory_address (ptr_mode, op0);
8131 #endif
8132
8133 return op0;
8134
8135 case ENTRY_VALUE_EXPR:
8136 abort ();
8137
8138 /* COMPLEX type for Extended Pascal & Fortran */
8139 case COMPLEX_EXPR:
8140 {
8141 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8142 rtx insns;
8143
8144 /* Get the rtx code of the operands. */
8145 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8146 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8147
8148 if (! target)
8149 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8150
8151 start_sequence ();
8152
8153 /* Move the real (op0) and imaginary (op1) parts to their location. */
8154 emit_move_insn (gen_realpart (mode, target), op0);
8155 emit_move_insn (gen_imagpart (mode, target), op1);
8156
8157 insns = get_insns ();
8158 end_sequence ();
8159
8160 /* Complex construction should appear as a single unit. */
8161 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8162 each with a separate pseudo as destination.
8163 It's not correct for flow to treat them as a unit. */
8164 if (GET_CODE (target) != CONCAT)
8165 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8166 else
8167 emit_insns (insns);
8168
8169 return target;
8170 }
8171
8172 case REALPART_EXPR:
8173 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8174 return gen_realpart (mode, op0);
8175
8176 case IMAGPART_EXPR:
8177 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8178 return gen_imagpart (mode, op0);
8179
8180 case CONJ_EXPR:
8181 {
8182 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8183 rtx imag_t;
8184 rtx insns;
8185
8186 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8187
8188 if (! target)
8189 target = gen_reg_rtx (mode);
8190
8191 start_sequence ();
8192
8193 /* Store the realpart and the negated imagpart to target. */
8194 emit_move_insn (gen_realpart (partmode, target),
8195 gen_realpart (partmode, op0));
8196
8197 imag_t = gen_imagpart (partmode, target);
8198 temp = expand_unop (partmode, neg_optab,
8199 gen_imagpart (partmode, op0), imag_t, 0);
8200 if (temp != imag_t)
8201 emit_move_insn (imag_t, temp);
8202
8203 insns = get_insns ();
8204 end_sequence ();
8205
8206 /* Conjugate should appear as a single unit.
8207 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8208 each with a separate pseudo as destination.
8209 It's not correct for flow to treat them as a unit. */
8210 if (GET_CODE (target) != CONCAT)
8211 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8212 else
8213 emit_insns (insns);
8214
8215 return target;
8216 }
8217
8218 case TRY_CATCH_EXPR:
8219 {
8220 tree handler = TREE_OPERAND (exp, 1);
8221
8222 expand_eh_region_start ();
8223
8224 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8225
8226 expand_eh_region_end (handler);
8227
8228 return op0;
8229 }
8230
8231 case TRY_FINALLY_EXPR:
8232 {
8233 tree try_block = TREE_OPERAND (exp, 0);
8234 tree finally_block = TREE_OPERAND (exp, 1);
8235 rtx finally_label = gen_label_rtx ();
8236 rtx done_label = gen_label_rtx ();
8237 rtx return_link = gen_reg_rtx (Pmode);
8238 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8239 (tree) finally_label, (tree) return_link);
8240 TREE_SIDE_EFFECTS (cleanup) = 1;
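/* Roughly, the finally block is emitted once, as a small subroutine:
   every exit from the try block (including the cleanup expanded by
   expand_end_bindings below) loads RETURN_LINK with a return address
   and jumps to FINALLY_LABEL, and the finally code returns through
   the indirect jump on RETURN_LINK.  */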
8241
8242 /* Start a new binding layer that will keep track of all cleanup
8243 actions to be performed. */
8244 expand_start_bindings (2);
8245
8246 target_temp_slot_level = temp_slot_level;
8247
8248 expand_decl_cleanup (NULL_TREE, cleanup);
8249 op0 = expand_expr (try_block, target, tmode, modifier);
8250
8251 preserve_temp_slots (op0);
8252 expand_end_bindings (NULL_TREE, 0, 0);
8253 emit_jump (done_label);
8254 emit_label (finally_label);
8255 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8256 emit_indirect_jump (return_link);
8257 emit_label (done_label);
8258 return op0;
8259 }
8260
8261 case GOTO_SUBROUTINE_EXPR:
8262 {
8263 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8264 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8265 rtx return_address = gen_label_rtx ();
8266 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8267 emit_jump (subr);
8268 emit_label (return_address);
8269 return const0_rtx;
8270 }
8271
8272 case POPDCC_EXPR:
8273 {
8274 rtx dcc = get_dynamic_cleanup_chain ();
8275 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8276 return const0_rtx;
8277 }
8278
8279 case POPDHC_EXPR:
8280 {
8281 rtx dhc = get_dynamic_handler_chain ();
8282 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8283 return const0_rtx;
8284 }
8285
8286 case VA_ARG_EXPR:
8287 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8288
8289 default:
8290 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8291 }
8292
8293 /* Here to do an ordinary binary operator, generating an instruction
8294 from the optab already placed in `this_optab'. */
8295 binop:
8296 preexpand_calls (exp);
8297 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8298 subtarget = 0;
8299 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8300 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8301 binop2:
8302 temp = expand_binop (mode, this_optab, op0, op1, target,
8303 unsignedp, OPTAB_LIB_WIDEN);
8304 if (temp == 0)
8305 abort ();
8306 return temp;
8307 }
8308 \f
8309 /* Return the tree node and offset if a given argument corresponds to
8310 a string constant. */
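/* For illustration: given an ADDR_EXPR of "foo", *PTR_OFFSET is set
   to zero and the STRING_CST is returned; given a PLUS_EXPR such as
   "foobar" + 2, the STRING_CST is returned and *PTR_OFFSET is set to
   the offset operand.  0 is returned if ARG is not recognized.  */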
8311
8312 tree
8313 string_constant (arg, ptr_offset)
8314 tree arg;
8315 tree *ptr_offset;
8316 {
8317 STRIP_NOPS (arg);
8318
8319 if (TREE_CODE (arg) == ADDR_EXPR
8320 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8321 {
8322 *ptr_offset = integer_zero_node;
8323 return TREE_OPERAND (arg, 0);
8324 }
8325 else if (TREE_CODE (arg) == PLUS_EXPR)
8326 {
8327 tree arg0 = TREE_OPERAND (arg, 0);
8328 tree arg1 = TREE_OPERAND (arg, 1);
8329
8330 STRIP_NOPS (arg0);
8331 STRIP_NOPS (arg1);
8332
8333 if (TREE_CODE (arg0) == ADDR_EXPR
8334 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8335 {
8336 *ptr_offset = arg1;
8337 return TREE_OPERAND (arg0, 0);
8338 }
8339 else if (TREE_CODE (arg1) == ADDR_EXPR
8340 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8341 {
8342 *ptr_offset = arg0;
8343 return TREE_OPERAND (arg1, 0);
8344 }
8345 }
8346
8347 return 0;
8348 }
8349 \f
8350 /* Expand code for a post- or pre- increment or decrement
8351 and return the RTX for the result.
8352 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
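/* For illustration: for `y = x++' (POST is 1) the rtx returned holds
   the old value of X, while for `y = ++x' (POST is 0) it holds the
   incremented value; IGNORE is nonzero when the caller does not use
   the result at all, as in a bare `x++;' statement.  */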
8353
8354 static rtx
8355 expand_increment (exp, post, ignore)
8356 register tree exp;
8357 int post, ignore;
8358 {
8359 register rtx op0, op1;
8360 register rtx temp, value;
8361 register tree incremented = TREE_OPERAND (exp, 0);
8362 optab this_optab = add_optab;
8363 int icode;
8364 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8365 int op0_is_copy = 0;
8366 int single_insn = 0;
8367 /* 1 means we can't store into OP0 directly,
8368 because it is a subreg narrower than a word,
8369 and we don't dare clobber the rest of the word. */
8370 int bad_subreg = 0;
8371
8372 /* Stabilize any component ref that might need to be
8373 evaluated more than once below. */
8374 if (!post
8375 || TREE_CODE (incremented) == BIT_FIELD_REF
8376 || (TREE_CODE (incremented) == COMPONENT_REF
8377 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8378 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8379 incremented = stabilize_reference (incremented);
8380 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8381 ones into save exprs so that they don't accidentally get evaluated
8382 more than once by the code below. */
8383 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8384 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8385 incremented = save_expr (incremented);
8386
8387 /* Compute the operands as RTX.
8388 Note whether OP0 is the actual lvalue or a copy of it:
8389 I believe it is a copy iff it is a register or subreg
8390 and insns were generated in computing it. */
8391
8392 temp = get_last_insn ();
8393 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8394
8395 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8396 in place but instead must do sign- or zero-extension during assignment,
8397 so we copy it into a new register and let the code below use it as
8398 a copy.
8399
8400 Note that we can safely modify this SUBREG since it is known not to be
8401 shared (it was made by the expand_expr call above). */
8402
8403 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8404 {
8405 if (post)
8406 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8407 else
8408 bad_subreg = 1;
8409 }
8410 else if (GET_CODE (op0) == SUBREG
8411 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8412 {
8413 /* We cannot increment this SUBREG in place. If we are
8414 post-incrementing, get a copy of the old value. Otherwise,
8415 just mark that we cannot increment in place. */
8416 if (post)
8417 op0 = copy_to_reg (op0);
8418 else
8419 bad_subreg = 1;
8420 }
8421
8422 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8423 && temp != get_last_insn ());
8424 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8425 EXPAND_MEMORY_USE_BAD);
8426
8427 /* Decide whether incrementing or decrementing. */
8428 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8429 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8430 this_optab = sub_optab;
8431
8432 /* Convert decrement by a constant into a negative increment. */
8433 if (this_optab == sub_optab
8434 && GET_CODE (op1) == CONST_INT)
8435 {
8436 op1 = GEN_INT (- INTVAL (op1));
8437 this_optab = add_optab;
8438 }
8439
8440 /* For a preincrement, see if we can do this with a single instruction. */
8441 if (!post)
8442 {
8443 icode = (int) this_optab->handlers[(int) mode].insn_code;
8444 if (icode != (int) CODE_FOR_nothing
8445 /* Make sure that OP0 is valid for operands 0 and 1
8446 of the insn we want to queue. */
8447 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8448 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8449 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8450 single_insn = 1;
8451 }
8452
8453 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8454 then we cannot just increment OP0. We must therefore contrive to
8455 increment the original value. Then, for postincrement, we can return
8456 OP0 since it is a copy of the old value. For preincrement, expand here
8457 unless we can do it with a single insn.
8458
8459 Likewise if storing directly into OP0 would clobber high bits
8460 we need to preserve (bad_subreg). */
8461 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8462 {
8463 /* This is the easiest way to increment the value wherever it is.
8464 Problems with multiple evaluation of INCREMENTED are prevented
8465 because either (1) it is a component_ref or preincrement,
8466 in which case it was stabilized above, or (2) it is an array_ref
8467 with constant index in an array in a register, which is
8468 safe to reevaluate. */
8469 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8470 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8471 ? MINUS_EXPR : PLUS_EXPR),
8472 TREE_TYPE (exp),
8473 incremented,
8474 TREE_OPERAND (exp, 1));
8475
8476 while (TREE_CODE (incremented) == NOP_EXPR
8477 || TREE_CODE (incremented) == CONVERT_EXPR)
8478 {
8479 newexp = convert (TREE_TYPE (incremented), newexp);
8480 incremented = TREE_OPERAND (incremented, 0);
8481 }
8482
8483 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8484 return post ? op0 : temp;
8485 }
8486
8487 if (post)
8488 {
8489 /* We have a true reference to the value in OP0.
8490 If there is an insn to add or subtract in this mode, queue it.
8491 Queueing the increment insn avoids the register shuffling
8492 that often results if we must increment now and first save
8493 the old value for subsequent use. */
8494
8495 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8496 op0 = stabilize (op0);
8497 #endif
8498
8499 icode = (int) this_optab->handlers[(int) mode].insn_code;
8500 if (icode != (int) CODE_FOR_nothing
8501 /* Make sure that OP0 is valid for operands 0 and 1
8502 of the insn we want to queue. */
8503 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8504 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8505 {
8506 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8507 op1 = force_reg (mode, op1);
8508
8509 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8510 }
8511 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8512 {
8513 rtx addr = (general_operand (XEXP (op0, 0), mode)
8514 ? force_reg (Pmode, XEXP (op0, 0))
8515 : copy_to_reg (XEXP (op0, 0)));
8516 rtx temp, result;
8517
8518 op0 = change_address (op0, VOIDmode, addr);
8519 temp = force_reg (GET_MODE (op0), op0);
8520 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8521 op1 = force_reg (mode, op1);
8522
8523 /* The increment queue is LIFO, thus we have to `queue'
8524 the instructions in reverse order. */
8525 enqueue_insn (op0, gen_move_insn (op0, temp));
8526 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8527 return result;
8528 }
8529 }
8530
8531 /* Preincrement, or we can't increment with one simple insn. */
8532 if (post)
8533 /* Save a copy of the value before inc or dec, to return it later. */
8534 temp = value = copy_to_reg (op0);
8535 else
8536 /* Arrange to return the incremented value. */
8537 /* Copy the rtx because expand_binop will protect from the queue,
8538 and the results of that would be invalid for us to return
8539 if our caller does emit_queue before using our result. */
8540 temp = copy_rtx (value = op0);
8541
8542 /* Increment however we can. */
8543 op1 = expand_binop (mode, this_optab, value, op1,
8544 current_function_check_memory_usage ? NULL_RTX : op0,
8545 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8546 /* Make sure the value is stored into OP0. */
8547 if (op1 != op0)
8548 emit_move_insn (op0, op1);
8549
8550 return temp;
8551 }
8552 \f
8553 /* Expand all function calls contained within EXP, innermost ones first.
8554 But don't look within expressions that have sequence points.
8555 For each CALL_EXPR, record the rtx for its value
8556 in the CALL_EXPR_RTL field. */
8557
8558 static void
8559 preexpand_calls (exp)
8560 tree exp;
8561 {
8562 register int nops, i;
8563 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8564
8565 if (! do_preexpand_calls)
8566 return;
8567
8568 /* Only expressions and references can contain calls. */
8569
8570 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8571 return;
8572
8573 switch (TREE_CODE (exp))
8574 {
8575 case CALL_EXPR:
8576 /* Do nothing if already expanded. */
8577 if (CALL_EXPR_RTL (exp) != 0
8578 /* Do nothing if the call returns a variable-sized object. */
8579 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
8580 /* Do nothing to built-in functions. */
8581 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
8582 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8583 == FUNCTION_DECL)
8584 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8585 return;
8586
8587 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8588 return;
8589
8590 case COMPOUND_EXPR:
8591 case COND_EXPR:
8592 case TRUTH_ANDIF_EXPR:
8593 case TRUTH_ORIF_EXPR:
8594 /* If we find one of these, then we can be sure
8595 the adjust will be done for it (since it makes jumps).
8596 Do it now, so that if this is inside an argument
8597 of a function, we don't get the stack adjustment
8598 after some other args have already been pushed. */
8599 do_pending_stack_adjust ();
8600 return;
8601
8602 case BLOCK:
8603 case RTL_EXPR:
8604 case WITH_CLEANUP_EXPR:
8605 case CLEANUP_POINT_EXPR:
8606 case TRY_CATCH_EXPR:
8607 return;
8608
8609 case SAVE_EXPR:
8610 if (SAVE_EXPR_RTL (exp) != 0)
8611 return;
8612
8613 default:
8614 break;
8615 }
8616
8617 nops = tree_code_length[(int) TREE_CODE (exp)];
8618 for (i = 0; i < nops; i++)
8619 if (TREE_OPERAND (exp, i) != 0)
8620 {
8621 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
8622 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
8623 It doesn't happen before the call is made. */
8624 ;
8625 else
8626 {
8627 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8628 if (type == 'e' || type == '<' || type == '1' || type == '2'
8629 || type == 'r')
8630 preexpand_calls (TREE_OPERAND (exp, i));
8631 }
8632 }
8633 }
8634 \f
8635 /* At the start of a function, record that we have no previously-pushed
8636 arguments waiting to be popped. */
8637
8638 void
8639 init_pending_stack_adjust ()
8640 {
8641 pending_stack_adjust = 0;
8642 }
8643
8644 /* When exiting from function, if safe, clear out any pending stack adjust
8645 so the adjustment won't get done.
8646
8647 Note, if the current function calls alloca, then it must have a
8648 frame pointer regardless of the value of flag_omit_frame_pointer. */
8649
8650 void
8651 clear_pending_stack_adjust ()
8652 {
8653 #ifdef EXIT_IGNORE_STACK
8654 if (optimize > 0
8655 && (! flag_omit_frame_pointer || current_function_calls_alloca)
8656 && EXIT_IGNORE_STACK
8657 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8658 && ! flag_inline_functions)
8659 pending_stack_adjust = 0;
8660 #endif
8661 }
8662
8663 /* Pop any previously-pushed arguments that have not been popped yet. */
8664
8665 void
8666 do_pending_stack_adjust ()
8667 {
8668 if (inhibit_defer_pop == 0)
8669 {
8670 if (pending_stack_adjust != 0)
8671 adjust_stack (GEN_INT (pending_stack_adjust));
8672 pending_stack_adjust = 0;
8673 }
8674 }
8675 \f
8676 /* Expand conditional expressions. */
8677
8678 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8679 LABEL is an rtx of code CODE_LABEL, in this function and all the
8680 functions here. */
8681
8682 void
8683 jumpifnot (exp, label)
8684 tree exp;
8685 rtx label;
8686 {
8687 do_jump (exp, label, NULL_RTX);
8688 }
8689
8690 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8691
8692 void
8693 jumpif (exp, label)
8694 tree exp;
8695 rtx label;
8696 {
8697 do_jump (exp, NULL_RTX, label);
8698 }
8699
8700 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8701 the result is zero, or IF_TRUE_LABEL if the result is one.
8702 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8703 meaning fall through in that case.
8704
8705 do_jump always does any pending stack adjust except when it does not
8706 actually perform a jump. An example where there is no jump
8707 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8708
8709 This function is responsible for optimizing cases such as
8710 &&, || and comparison operators in EXP. */
8711
8712 void
8713 do_jump (exp, if_false_label, if_true_label)
8714 tree exp;
8715 rtx if_false_label, if_true_label;
8716 {
8717 register enum tree_code code = TREE_CODE (exp);
8718 /* Some cases need to create a label to jump to
8719 in order to properly fall through.
8720 These cases set DROP_THROUGH_LABEL nonzero. */
8721 rtx drop_through_label = 0;
8722 rtx temp;
8723 int i;
8724 tree type;
8725 enum machine_mode mode;
8726
8727 #ifdef MAX_INTEGER_COMPUTATION_MODE
8728 check_max_integer_computation_mode (exp);
8729 #endif
8730
8731 emit_queue ();
8732
8733 switch (code)
8734 {
8735 case ERROR_MARK:
8736 break;
8737
8738 case INTEGER_CST:
8739 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8740 if (temp)
8741 emit_jump (temp);
8742 break;
8743
8744 #if 0
8745 /* This is not true with #pragma weak.  */
8746 case ADDR_EXPR:
8747 /* The address of something can never be zero. */
8748 if (if_true_label)
8749 emit_jump (if_true_label);
8750 break;
8751 #endif
8752
8753 case NOP_EXPR:
8754 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8755 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8756 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8757 goto normal;
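      /* Fall through.  */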
8758 case CONVERT_EXPR:
8759 /* If we are narrowing the operand, we have to do the compare in the
8760 narrower mode. */
8761 if ((TYPE_PRECISION (TREE_TYPE (exp))
8762 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8763 goto normal;
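      /* Fall through.  */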
8764 case NON_LVALUE_EXPR:
8765 case REFERENCE_EXPR:
8766 case ABS_EXPR:
8767 case NEGATE_EXPR:
8768 case LROTATE_EXPR:
8769 case RROTATE_EXPR:
8770 /* These cannot change zero->non-zero or vice versa. */
8771 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8772 break;
8773
8774 #if 0
8775 /* This is never less insns than evaluating the PLUS_EXPR followed by
8776 a test and can be longer if the test is eliminated. */
8777 case PLUS_EXPR:
8778 /* Reduce to minus. */
8779 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8780 TREE_OPERAND (exp, 0),
8781 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8782 TREE_OPERAND (exp, 1))));
8783 /* Process as MINUS. */
8784 #endif
8785
8786 case MINUS_EXPR:
8787 /* Non-zero iff operands of minus differ. */
8788 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
8789 TREE_OPERAND (exp, 0),
8790 TREE_OPERAND (exp, 1)),
8791 NE, NE, if_false_label, if_true_label);
8792 break;
8793
8794 case BIT_AND_EXPR:
8795 /* If we are AND'ing with a small constant, do this comparison in the
8796 smallest type that fits. If the machine doesn't have comparisons
8797 that small, it will be converted back to the wider comparison.
8798 This helps if we are testing the sign bit of a narrower object.
8799 combine can't do this for us because it can't know whether a
8800 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
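      /* For instance (an illustration, not taken from the sources): with
         a 32-bit int X, `x & 0x80' gives i == 7, so the test is redone
         as `(unsigned char) (x & 0x80)' in QImode whenever a QImode
         compare pattern is available.  */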
8801
8802 if (! SLOW_BYTE_ACCESS
8803 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8804 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8805 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8806 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8807 && (type = type_for_mode (mode, 1)) != 0
8808 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8809 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8810 != CODE_FOR_nothing))
8811 {
8812 do_jump (convert (type, exp), if_false_label, if_true_label);
8813 break;
8814 }
8815 goto normal;
8816
8817 case TRUTH_NOT_EXPR:
8818 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8819 break;
8820
8821 case TRUTH_ANDIF_EXPR:
8822 if (if_false_label == 0)
8823 if_false_label = drop_through_label = gen_label_rtx ();
8824 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8825 start_cleanup_deferral ();
8826 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8827 end_cleanup_deferral ();
8828 break;
8829
8830 case TRUTH_ORIF_EXPR:
8831 if (if_true_label == 0)
8832 if_true_label = drop_through_label = gen_label_rtx ();
8833 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8834 start_cleanup_deferral ();
8835 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8836 end_cleanup_deferral ();
8837 break;
8838
8839 case COMPOUND_EXPR:
8840 push_temp_slots ();
8841 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8842 preserve_temp_slots (NULL_RTX);
8843 free_temp_slots ();
8844 pop_temp_slots ();
8845 emit_queue ();
8846 do_pending_stack_adjust ();
8847 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8848 break;
8849
8850 case COMPONENT_REF:
8851 case BIT_FIELD_REF:
8852 case ARRAY_REF:
8853 {
8854 int bitsize, bitpos, unsignedp;
8855 enum machine_mode mode;
8856 tree type;
8857 tree offset;
8858 int volatilep = 0;
8859 int alignment;
8860
8861 /* Get description of this reference. We don't actually care
8862 about the underlying object here. */
8863 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8864 &mode, &unsignedp, &volatilep,
8865 &alignment);
8866
8867 type = type_for_size (bitsize, unsignedp);
8868 if (! SLOW_BYTE_ACCESS
8869 && type != 0 && bitsize >= 0
8870 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8871 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8872 != CODE_FOR_nothing))
8873 {
8874 do_jump (convert (type, exp), if_false_label, if_true_label);
8875 break;
8876 }
8877 goto normal;
8878 }
8879
8880 case COND_EXPR:
8881 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8882 if (integer_onep (TREE_OPERAND (exp, 1))
8883 && integer_zerop (TREE_OPERAND (exp, 2)))
8884 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8885
8886 else if (integer_zerop (TREE_OPERAND (exp, 1))
8887 && integer_onep (TREE_OPERAND (exp, 2)))
8888 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8889
8890 else
8891 {
8892 register rtx label1 = gen_label_rtx ();
8893 drop_through_label = gen_label_rtx ();
8894
8895 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8896
8897 start_cleanup_deferral ();
8898 /* Now the THEN-expression. */
8899 do_jump (TREE_OPERAND (exp, 1),
8900 if_false_label ? if_false_label : drop_through_label,
8901 if_true_label ? if_true_label : drop_through_label);
8902 /* In case the do_jump just above never jumps. */
8903 do_pending_stack_adjust ();
8904 emit_label (label1);
8905
8906 /* Now the ELSE-expression. */
8907 do_jump (TREE_OPERAND (exp, 2),
8908 if_false_label ? if_false_label : drop_through_label,
8909 if_true_label ? if_true_label : drop_through_label);
8910 end_cleanup_deferral ();
8911 }
8912 break;
8913
8914 case EQ_EXPR:
8915 {
8916 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8917
8918 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8919 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8920 {
8921 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8922 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8923 do_jump
8924 (fold
8925 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
8926 fold (build (EQ_EXPR, TREE_TYPE (exp),
8927 fold (build1 (REALPART_EXPR,
8928 TREE_TYPE (inner_type),
8929 exp0)),
8930 fold (build1 (REALPART_EXPR,
8931 TREE_TYPE (inner_type),
8932 exp1)))),
8933 fold (build (EQ_EXPR, TREE_TYPE (exp),
8934 fold (build1 (IMAGPART_EXPR,
8935 TREE_TYPE (inner_type),
8936 exp0)),
8937 fold (build1 (IMAGPART_EXPR,
8938 TREE_TYPE (inner_type),
8939 exp1)))))),
8940 if_false_label, if_true_label);
8941 }
8942
8943 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8944 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8945
8946 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8947 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
8948 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8949 else
8950 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
8951 break;
8952 }
8953
8954 case NE_EXPR:
8955 {
8956 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8957
8958 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8959 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8960 {
8961 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8962 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8963 do_jump
8964 (fold
8965 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
8966 fold (build (NE_EXPR, TREE_TYPE (exp),
8967 fold (build1 (REALPART_EXPR,
8968 TREE_TYPE (inner_type),
8969 exp0)),
8970 fold (build1 (REALPART_EXPR,
8971 TREE_TYPE (inner_type),
8972 exp1)))),
8973 fold (build (NE_EXPR, TREE_TYPE (exp),
8974 fold (build1 (IMAGPART_EXPR,
8975 TREE_TYPE (inner_type),
8976 exp0)),
8977 fold (build1 (IMAGPART_EXPR,
8978 TREE_TYPE (inner_type),
8979 exp1)))))),
8980 if_false_label, if_true_label);
8981 }
8982
8983 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8984 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8985
8986 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8987 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
8988 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8989 else
8990 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
8991 break;
8992 }
8993
8994 case LT_EXPR:
8995 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8996 if (GET_MODE_CLASS (mode) == MODE_INT
8997 && ! can_compare_p (mode, ccp_jump))
8998 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8999 else
9000 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9001 break;
9002
9003 case LE_EXPR:
9004 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9005 if (GET_MODE_CLASS (mode) == MODE_INT
9006 && ! can_compare_p (mode, ccp_jump))
9007 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9008 else
9009 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9010 break;
9011
9012 case GT_EXPR:
9013 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9014 if (GET_MODE_CLASS (mode) == MODE_INT
9015 && ! can_compare_p (mode, ccp_jump))
9016 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9017 else
9018 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9019 break;
9020
9021 case GE_EXPR:
9022 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9023 if (GET_MODE_CLASS (mode) == MODE_INT
9024 && ! can_compare_p (mode, ccp_jump))
9025 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9026 else
9027 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9028 break;
9029
9030 default:
9031 normal:
9032 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9033 #if 0
9034 /* This is not needed any more and causes poor code since it causes
9035 comparisons and tests from non-SI objects to have different code
9036 sequences. */
9037 /* Copy to register to avoid generating bad insns by cse
9038 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9039 if (!cse_not_expected && GET_CODE (temp) == MEM)
9040 temp = copy_to_reg (temp);
9041 #endif
9042 do_pending_stack_adjust ();
9043 /* Do any postincrements in the expression that was tested. */
9044 emit_queue ();
9045
9046 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9047 {
9048 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9049 if (target)
9050 emit_jump (target);
9051 }
9052 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9053 && ! can_compare_p (GET_MODE (temp), ccp_jump))
9054 /* Note swapping the labels gives us not-equal. */
9055 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9056 else if (GET_MODE (temp) != VOIDmode)
9057 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9058 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9059 GET_MODE (temp), NULL_RTX, 0,
9060 if_false_label, if_true_label);
9061 else
9062 abort ();
9063 }
9064
9065 if (drop_through_label)
9066 {
9067 /* If do_jump produces code that might be jumped around,
9068 do any stack adjusts from that code, before the place
9069 where control merges in. */
9070 do_pending_stack_adjust ();
9071 emit_label (drop_through_label);
9072 }
9073 }
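
/* An illustrative control-flow model (not part of the original code;
   the example_ name is a placeholder) of what do_jump arranges for
   `a && b' when both labels are supplied: operand 0 can only reach the
   false label, operand 1 decides between the two.  */
#if 0
static void
example_do_jump_andif (a, b)
     int a, b;
{
  if (! a)
    goto if_false;
  if (! b)
    goto if_false;
  goto if_true;

 if_true:
  ;
 if_false:
  ;
}
#endif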
9074 \f
9075 /* Given a comparison expression EXP for values too wide to be compared
9076 with one insn, test the comparison and jump to the appropriate label.
9077 The code of EXP is ignored; we always test GT if SWAP is 0,
9078 and LT if SWAP is 1. */
9079
9080 static void
9081 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9082 tree exp;
9083 int swap;
9084 rtx if_false_label, if_true_label;
9085 {
9086 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9087 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9088 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9089 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9090
9091 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9092 }
9093
9094 /* Compare OP0 with OP1, word at a time, in mode MODE.
9095 UNSIGNEDP says to do unsigned comparison.
9096 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9097
9098 void
9099 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9100 enum machine_mode mode;
9101 int unsignedp;
9102 rtx op0, op1;
9103 rtx if_false_label, if_true_label;
9104 {
9105 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9106 rtx drop_through_label = 0;
9107 int i;
9108
9109 if (! if_true_label || ! if_false_label)
9110 drop_through_label = gen_label_rtx ();
9111 if (! if_true_label)
9112 if_true_label = drop_through_label;
9113 if (! if_false_label)
9114 if_false_label = drop_through_label;
9115
9116 /* Compare a word at a time, high order first. */
9117 for (i = 0; i < nwords; i++)
9118 {
9119 rtx op0_word, op1_word;
9120
9121 if (WORDS_BIG_ENDIAN)
9122 {
9123 op0_word = operand_subword_force (op0, i, mode);
9124 op1_word = operand_subword_force (op1, i, mode);
9125 }
9126 else
9127 {
9128 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9129 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9130 }
9131
9132 /* All but high-order word must be compared as unsigned. */
9133 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9134 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9135 NULL_RTX, if_true_label);
9136
9137 /* Consider lower words only if these are equal. */
9138 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9139 NULL_RTX, 0, NULL_RTX, if_false_label);
9140 }
9141
9142 if (if_false_label)
9143 emit_jump (if_false_label);
9144 if (drop_through_label)
9145 emit_label (drop_through_label);
9146 }
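
/* An illustrative sketch (not part of the original code; the example_
   name is a placeholder) of the test emitted by the loop above, written
   as plain C for two-word operands with the high-order word in element 1
   and an unsigned comparison.  */
#if 0
static int
example_gt_by_parts (op0, op1)
     unsigned int op0[2], op1[2];
{
  int i;

  /* High-order word first; a lower word matters only when every higher
     word compares equal.  */
  for (i = 1; i >= 0; i--)
    {
      if (op0[i] > op1[i])
	return 1;		/* would jump to if_true_label  */
      if (op0[i] != op1[i])
	return 0;		/* would jump to if_false_label  */
    }
  return 0;			/* final jump to if_false_label  */
}
#endif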
9147
9148 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9149 with one insn, test the comparison and jump to the appropriate label. */
9150
9151 static void
9152 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9153 tree exp;
9154 rtx if_false_label, if_true_label;
9155 {
9156 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9157 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9158 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9159 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9160 int i;
9161 rtx drop_through_label = 0;
9162
9163 if (! if_false_label)
9164 drop_through_label = if_false_label = gen_label_rtx ();
9165
9166 for (i = 0; i < nwords; i++)
9167 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9168 operand_subword_force (op1, i, mode),
9169 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9170 word_mode, NULL_RTX, 0, if_false_label,
9171 NULL_RTX);
9172
9173 if (if_true_label)
9174 emit_jump (if_true_label);
9175 if (drop_through_label)
9176 emit_label (drop_through_label);
9177 }
9178 \f
9179 /* Jump according to whether OP0 is 0.
9180 We assume that OP0 has an integer mode that is too wide
9181 for the available compare insns. */
9182
9183 void
9184 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9185 rtx op0;
9186 rtx if_false_label, if_true_label;
9187 {
9188 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9189 rtx part;
9190 int i;
9191 rtx drop_through_label = 0;
9192
9193 /* The fastest way of doing this comparison on almost any machine is to
9194 "or" all the words and compare the result. If all have to be loaded
9195 from memory and this is a very wide item, it's possible this may
9196 be slower, but that's highly unlikely. */
9197
9198 part = gen_reg_rtx (word_mode);
9199 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9200 for (i = 1; i < nwords && part != 0; i++)
9201 part = expand_binop (word_mode, ior_optab, part,
9202 operand_subword_force (op0, i, GET_MODE (op0)),
9203 part, 1, OPTAB_WIDEN);
9204
9205 if (part != 0)
9206 {
9207 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9208 NULL_RTX, 0, if_false_label, if_true_label);
9209
9210 return;
9211 }
9212
9213 /* If we couldn't do the "or" simply, do this with a series of compares. */
9214 if (! if_false_label)
9215 drop_through_label = if_false_label = gen_label_rtx ();
9216
9217 for (i = 0; i < nwords; i++)
9218 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9219 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9220 if_false_label, NULL_RTX);
9221
9222 if (if_true_label)
9223 emit_jump (if_true_label);
9224
9225 if (drop_through_label)
9226 emit_label (drop_through_label);
9227 }
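
/* An illustrative sketch (not part of the original code; the example_
   name is a placeholder) of the fast path above: OR the words together
   and compare the result with zero, shown for a two-word operand.  */
#if 0
static int
example_zerop_by_parts (op0)
     unsigned int op0[2];
{
  return (op0[0] | op0[1]) == 0;	/* jump to if_true_label if so  */
}
#endif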
9228 \f
9229 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9230 (including code to compute the values to be compared)
9231 and set (CC0) according to the result.
9232 The decision as to signed or unsigned comparison must be made by the caller.
9233
9234 We force a stack adjustment unless there are currently
9235 things pushed on the stack that aren't yet used.
9236
9237 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9238 compared.
9239
9240 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9241 size of MODE should be used. */
9242
9243 rtx
9244 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9245 register rtx op0, op1;
9246 enum rtx_code code;
9247 int unsignedp;
9248 enum machine_mode mode;
9249 rtx size;
9250 int align;
9251 {
9252 rtx tem;
9253
9254 /* If one operand is constant, make it the second one. Only do this
9255 if the other operand is not constant as well. */
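  /* For example (an illustration, not taken from the sources):
     `2 < X' becomes `X > 2' here, via swap_condition.  */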
9256
9257 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9258 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9259 {
9260 tem = op0;
9261 op0 = op1;
9262 op1 = tem;
9263 code = swap_condition (code);
9264 }
9265
9266 if (flag_force_mem)
9267 {
9268 op0 = force_not_mem (op0);
9269 op1 = force_not_mem (op1);
9270 }
9271
9272 do_pending_stack_adjust ();
9273
9274 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9275 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9276 return tem;
9277
9278 #if 0
9279 /* There's no need to do this now that combine.c can eliminate lots of
9280 sign extensions. This can be less efficient in certain cases on other
9281 machines. */
9282
9283 /* If this is a signed equality comparison, we can do it as an
9284 unsigned comparison since zero-extension is cheaper than sign
9285 extension and comparisons with zero are done as unsigned. This is
9286 the case even on machines that can do fast sign extension, since
9287 zero-extension is easier to combine with other operations than
9288 sign-extension is. If we are comparing against a constant, we must
9289 convert it to what it would look like unsigned. */
9290 if ((code == EQ || code == NE) && ! unsignedp
9291 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9292 {
9293 if (GET_CODE (op1) == CONST_INT
9294 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9295 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9296 unsignedp = 1;
9297 }
9298 #endif
9299
9300 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9301
9302 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9303 }
9304
9305 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9306 The decision as to signed or unsigned comparison must be made by the caller.
9307
9308 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9309 compared.
9310
9311 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9312 size of MODE should be used. */
9313
9314 void
9315 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9316 if_false_label, if_true_label)
9317 register rtx op0, op1;
9318 enum rtx_code code;
9319 int unsignedp;
9320 enum machine_mode mode;
9321 rtx size;
9322 int align;
9323 rtx if_false_label, if_true_label;
9324 {
9325 rtx tem;
9326 int dummy_true_label = 0;
9327
9328 /* Reverse the comparison if that is safe and we want to jump if it is
9329 false. */
9330 if (! if_true_label && ! FLOAT_MODE_P (mode))
9331 {
9332 if_true_label = if_false_label;
9333 if_false_label = 0;
9334 code = reverse_condition (code);
9335 }
9336
9337 /* If one operand is constant, make it the second one. Only do this
9338 if the other operand is not constant as well. */
9339
9340 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9341 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9342 {
9343 tem = op0;
9344 op0 = op1;
9345 op1 = tem;
9346 code = swap_condition (code);
9347 }
9348
9349 if (flag_force_mem)
9350 {
9351 op0 = force_not_mem (op0);
9352 op1 = force_not_mem (op1);
9353 }
9354
9355 do_pending_stack_adjust ();
9356
9357 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9358 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9359 {
9360 if (tem == const_true_rtx)
9361 {
9362 if (if_true_label)
9363 emit_jump (if_true_label);
9364 }
9365 else
9366 {
9367 if (if_false_label)
9368 emit_jump (if_false_label);
9369 }
9370 return;
9371 }
9372
9373 #if 0
9374 /* There's no need to do this now that combine.c can eliminate lots of
9375 sign extensions. This can be less efficient in certain cases on other
9376 machines. */
9377
9378 /* If this is a signed equality comparison, we can do it as an
9379 unsigned comparison since zero-extension is cheaper than sign
9380 extension and comparisons with zero are done as unsigned. This is
9381 the case even on machines that can do fast sign extension, since
9382 zero-extension is easier to combine with other operations than
9383 sign-extension is. If we are comparing against a constant, we must
9384 convert it to what it would look like unsigned. */
9385 if ((code == EQ || code == NE) && ! unsignedp
9386 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9387 {
9388 if (GET_CODE (op1) == CONST_INT
9389 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9390 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9391 unsignedp = 1;
9392 }
9393 #endif
9394
9395 if (! if_true_label)
9396 {
9397 dummy_true_label = 1;
9398 if_true_label = gen_label_rtx ();
9399 }
9400
9401 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9402 if_true_label);
9403
9404 if (if_false_label)
9405 emit_jump (if_false_label);
9406 if (dummy_true_label)
9407 emit_label (if_true_label);
9408 }
9409
9410 /* Generate code for a comparison expression EXP (including code to compute
9411 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9412 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9413 generated code will drop through.
9414 SIGNED_CODE should be the rtx operation for this comparison for
9415 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9416
9417 We force a stack adjustment unless there are currently
9418 things pushed on the stack that aren't yet used. */
9419
9420 static void
9421 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9422 if_true_label)
9423 register tree exp;
9424 enum rtx_code signed_code, unsigned_code;
9425 rtx if_false_label, if_true_label;
9426 {
9427 register rtx op0, op1;
9428 register tree type;
9429 register enum machine_mode mode;
9430 int unsignedp;
9431 enum rtx_code code;
9432
9433 /* Don't crash if the comparison was erroneous. */
9434 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9435 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9436 return;
9437
9438 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9439 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9440 mode = TYPE_MODE (type);
9441 unsignedp = TREE_UNSIGNED (type);
9442 code = unsignedp ? unsigned_code : signed_code;
9443
9444 #ifdef HAVE_canonicalize_funcptr_for_compare
9445 /* If function pointers need to be "canonicalized" before they can
9446 be reliably compared, then canonicalize them. */
9447 if (HAVE_canonicalize_funcptr_for_compare
9448 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9449 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9450 == FUNCTION_TYPE))
9451 {
9452 rtx new_op0 = gen_reg_rtx (mode);
9453
9454 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9455 op0 = new_op0;
9456 }
9457
9458 if (HAVE_canonicalize_funcptr_for_compare
9459 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9460 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9461 == FUNCTION_TYPE))
9462 {
9463 rtx new_op1 = gen_reg_rtx (mode);
9464
9465 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9466 op1 = new_op1;
9467 }
9468 #endif
9469
9470 /* Do any postincrements in the expression that was tested. */
9471 emit_queue ();
9472
9473 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9474 ((mode == BLKmode)
9475 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9476 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT,
9477 if_false_label, if_true_label);
9478 }
9479 \f
9480 /* Generate code to calculate EXP using a store-flag instruction
9481 and return an rtx for the result. EXP is either a comparison
9482 or a TRUTH_NOT_EXPR whose operand is a comparison.
9483
9484 If TARGET is nonzero, store the result there if convenient.
9485
9486 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9487 cheap.
9488
9489 Return zero if there is no suitable set-flag instruction
9490 available on this machine.
9491
9492 Once expand_expr has been called on the arguments of the comparison,
9493 we are committed to doing the store flag, since it is not safe to
9494 re-evaluate the expression. We emit the store-flag insn by calling
9495 emit_store_flag, but only expand the arguments if we have a reason
9496 to believe that emit_store_flag will be successful. If we think that
9497 it will, but it isn't, we have to simulate the store-flag with a
9498 set/jump/set sequence. */
9499
9500 static rtx
9501 do_store_flag (exp, target, mode, only_cheap)
9502 tree exp;
9503 rtx target;
9504 enum machine_mode mode;
9505 int only_cheap;
9506 {
9507 enum rtx_code code;
9508 tree arg0, arg1, type;
9509 tree tem;
9510 enum machine_mode operand_mode;
9511 int invert = 0;
9512 int unsignedp;
9513 rtx op0, op1;
9514 enum insn_code icode;
9515 rtx subtarget = target;
9516 rtx result, label;
9517
9518 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9519 result at the end. We can't simply invert the test since it would
9520 have already been inverted if it were valid. This case occurs for
9521 some floating-point comparisons. */
9522
9523 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9524 invert = 1, exp = TREE_OPERAND (exp, 0);
9525
9526 arg0 = TREE_OPERAND (exp, 0);
9527 arg1 = TREE_OPERAND (exp, 1);
9528 type = TREE_TYPE (arg0);
9529 operand_mode = TYPE_MODE (type);
9530 unsignedp = TREE_UNSIGNED (type);
9531
9532 /* We won't bother with BLKmode store-flag operations because it would mean
9533 passing a lot of information to emit_store_flag. */
9534 if (operand_mode == BLKmode)
9535 return 0;
9536
9537 /* We won't bother with store-flag operations involving function pointers
9538 when function pointers must be canonicalized before comparisons. */
9539 #ifdef HAVE_canonicalize_funcptr_for_compare
9540 if (HAVE_canonicalize_funcptr_for_compare
9541 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9542 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9543 == FUNCTION_TYPE))
9544 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9545 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9546 == FUNCTION_TYPE))))
9547 return 0;
9548 #endif
9549
9550 STRIP_NOPS (arg0);
9551 STRIP_NOPS (arg1);
9552
9553 /* Get the rtx comparison code to use. We know that EXP is a comparison
9554 operation of some type. Some comparisons against 1 and -1 can be
9555 converted to comparisons with zero. Do so here so that the tests
9556 below will be aware that we have a comparison with zero. These
9557 tests will not catch constants in the first operand, but constants
9558 are rarely passed as the first operand. */
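  /* For example (an illustration, not taken from the sources): a signed
     `x < 1' is handled below as `x <= 0' and a signed `x >= 1' as
     `x > 0', so the code that follows sees a comparison with zero.  */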
9559
9560 switch (TREE_CODE (exp))
9561 {
9562 case EQ_EXPR:
9563 code = EQ;
9564 break;
9565 case NE_EXPR:
9566 code = NE;
9567 break;
9568 case LT_EXPR:
9569 if (integer_onep (arg1))
9570 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9571 else
9572 code = unsignedp ? LTU : LT;
9573 break;
9574 case LE_EXPR:
9575 if (! unsignedp && integer_all_onesp (arg1))
9576 arg1 = integer_zero_node, code = LT;
9577 else
9578 code = unsignedp ? LEU : LE;
9579 break;
9580 case GT_EXPR:
9581 if (! unsignedp && integer_all_onesp (arg1))
9582 arg1 = integer_zero_node, code = GE;
9583 else
9584 code = unsignedp ? GTU : GT;
9585 break;
9586 case GE_EXPR:
9587 if (integer_onep (arg1))
9588 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9589 else
9590 code = unsignedp ? GEU : GE;
9591 break;
9592 default:
9593 abort ();
9594 }
9595
9596 /* Put a constant second. */
9597 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9598 {
9599 tem = arg0; arg0 = arg1; arg1 = tem;
9600 code = swap_condition (code);
9601 }
9602
9603 /* If this is an equality or inequality test of a single bit, we can
9604 do this by shifting the bit being tested to the low-order bit and
9605 masking the result with the constant 1. If the condition was EQ,
9606 we xor it with 1. This does not require an scc insn and is faster
9607 than an scc insn even if we have it. */
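  /* For example (an illustration, not taken from the sources):
     `(x & 0x20) != 0' is computed as `(x >> 5) & 1', and the EQ form
     `(x & 0x20) == 0' as `((x >> 5) ^ 1) & 1'.  */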
9608
9609 if ((code == NE || code == EQ)
9610 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9611 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9612 {
9613 tree inner = TREE_OPERAND (arg0, 0);
9614 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9615 int ops_unsignedp;
9616
9617 /* If INNER is a right shift of a constant and it plus BITNUM does
9618 not overflow, adjust BITNUM and INNER. */
9619
9620 if (TREE_CODE (inner) == RSHIFT_EXPR
9621 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9622 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9623 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9624 < TYPE_PRECISION (type)))
9625 {
9626 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9627 inner = TREE_OPERAND (inner, 0);
9628 }
9629
9630 /* If we are going to be able to omit the AND below, we must do our
9631 operations as unsigned. If we must use the AND, we have a choice.
9632 Normally unsigned is faster, but for some machines signed is. */
9633 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9634 #ifdef LOAD_EXTEND_OP
9635 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9636 #else
9637 : 1
9638 #endif
9639 );
9640
9641 if (subtarget == 0 || GET_CODE (subtarget) != REG
9642 || GET_MODE (subtarget) != operand_mode
9643 || ! safe_from_p (subtarget, inner, 1))
9644 subtarget = 0;
9645
9646 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9647
9648 if (bitnum != 0)
9649 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9650 size_int (bitnum), subtarget, ops_unsignedp);
9651
9652 if (GET_MODE (op0) != mode)
9653 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9654
9655 if ((code == EQ && ! invert) || (code == NE && invert))
9656 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9657 ops_unsignedp, OPTAB_LIB_WIDEN);
9658
9659 /* Put the AND last so it can combine with more things. */
9660 if (bitnum != TYPE_PRECISION (type) - 1)
9661 op0 = expand_and (op0, const1_rtx, subtarget);
9662
9663 return op0;
9664 }
9665
9666 /* Now see if we are likely to be able to do this. Return if not. */
9667 if (! can_compare_p (operand_mode, ccp_store_flag))
9668 return 0;
9669 icode = setcc_gen_code[(int) code];
9670 if (icode == CODE_FOR_nothing
9671 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9672 {
9673 /* We can only do this if it is one of the special cases that
9674 can be handled without an scc insn. */
9675 if ((code == LT && integer_zerop (arg1))
9676 || (! only_cheap && code == GE && integer_zerop (arg1)))
9677 ;
9678 else if (BRANCH_COST >= 0
9679 && ! only_cheap && (code == NE || code == EQ)
9680 && TREE_CODE (type) != REAL_TYPE
9681 && ((abs_optab->handlers[(int) operand_mode].insn_code
9682 != CODE_FOR_nothing)
9683 || (ffs_optab->handlers[(int) operand_mode].insn_code
9684 != CODE_FOR_nothing)))
9685 ;
9686 else
9687 return 0;
9688 }
9689
9690 preexpand_calls (exp);
9691 if (subtarget == 0 || GET_CODE (subtarget) != REG
9692 || GET_MODE (subtarget) != operand_mode
9693 || ! safe_from_p (subtarget, arg1, 1))
9694 subtarget = 0;
9695
9696 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9697 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9698
9699 if (target == 0)
9700 target = gen_reg_rtx (mode);
9701
9702 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9703 because, if emit_store_flag does anything, it will succeed and
9704 OP0 and OP1 will not be used subsequently. */
9705
9706 result = emit_store_flag (target, code,
9707 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9708 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9709 operand_mode, unsignedp, 1);
9710
9711 if (result)
9712 {
9713 if (invert)
9714 result = expand_binop (mode, xor_optab, result, const1_rtx,
9715 result, 0, OPTAB_LIB_WIDEN);
9716 return result;
9717 }
9718
9719 /* If this failed, we have to do this with set/compare/jump/set code. */
9720 if (GET_CODE (target) != REG
9721 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9722 target = gen_reg_rtx (GET_MODE (target));
9723
9724 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9725 result = compare_from_rtx (op0, op1, code, unsignedp,
9726 operand_mode, NULL_RTX, 0);
9727 if (GET_CODE (result) == CONST_INT)
9728 return (((result == const0_rtx && ! invert)
9729 || (result != const0_rtx && invert))
9730 ? const0_rtx : const1_rtx);
9731
9732 label = gen_label_rtx ();
9733 if (bcc_gen_fctn[(int) code] == 0)
9734 abort ();
9735
9736 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9737 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9738 emit_label (label);
9739
9740 return target;
9741 }
9742 \f
9743 /* Generate a tablejump instruction (used for switch statements). */
9744
9745 #ifdef HAVE_tablejump
9746
9747 /* INDEX is the value being switched on, with the lowest value
9748 in the table already subtracted.
9749 MODE is its expected mode (needed if INDEX is constant).
9750 RANGE is the length of the jump table.
9751 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9752
9753 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9754 index value is out of range. */
9755
9756 void
9757 do_tablejump (index, mode, range, table_label, default_label)
9758 rtx index, range, table_label, default_label;
9759 enum machine_mode mode;
9760 {
9761 register rtx temp, vector;
9762
9763 /* Do an unsigned comparison (in the proper mode) between the index
9764 expression and the value which represents the length of the range.
9765 Since we just finished subtracting the lower bound of the range
9766 from the index expression, this comparison allows us to simultaneously
9767 check that the original index expression value is both greater than
9768 or equal to the minimum value of the range and less than or equal to
9769 the maximum value of the range. */
9770
9771 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9772 0, default_label);
9773
9774 /* If index is in range, it must fit in Pmode.
9775 Convert to Pmode so we can index with it. */
9776 if (mode != Pmode)
9777 index = convert_to_mode (Pmode, index, 1);
9778
9779 /* Don't let a MEM slip through, because then INDEX that comes
9780 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9781 and break_out_memory_refs will go to work on it and mess it up. */
9782 #ifdef PIC_CASE_VECTOR_ADDRESS
9783 if (flag_pic && GET_CODE (index) != REG)
9784 index = copy_to_mode_reg (Pmode, index);
9785 #endif
9786
9787 /* If flag_force_addr were to affect this address
9788 it could interfere with the tricky assumptions made
9789 about addresses that contain label-refs,
9790 which may be valid only very near the tablejump itself. */
9791 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9792 GET_MODE_SIZE, because this indicates how large insns are. The other
9793 uses should all be Pmode, because they are addresses. This code
9794 could fail if addresses and insns are not the same size. */
9795 index = gen_rtx_PLUS (Pmode,
9796 gen_rtx_MULT (Pmode, index,
9797 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9798 gen_rtx_LABEL_REF (Pmode, table_label));
9799 #ifdef PIC_CASE_VECTOR_ADDRESS
9800 if (flag_pic)
9801 index = PIC_CASE_VECTOR_ADDRESS (index);
9802 else
9803 #endif
9804 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9805 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9806 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9807 RTX_UNCHANGING_P (vector) = 1;
9808 convert_move (temp, vector, 0);
9809
9810 emit_jump_insn (gen_tablejump (temp, table_label));
9811
9812 /* If we are generating PIC code or if the table is PC-relative, the
9813 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9814 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9815 emit_barrier ();
9816 }
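
/* An illustrative model (not part of the original code; the example_
   name is a placeholder) of the non-PIC dispatch sequence emitted
   above, with the jump table viewed as an array of code addresses.  */
#if 0
static void *
example_tablejump (table, index, range, default_label)
     void *const table[];
     unsigned long index, range;
     void *default_label;
{
  /* The unsigned bounds check also rejects indices that were below the
     table's lower bound before it was subtracted out.  */
  if (index > range)
    return default_label;
  /* index * GET_MODE_SIZE (CASE_VECTOR_MODE) + &table[0], then a load
     of the vector entry.  */
  return table[index];
}
#endif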
9817
9818 #endif /* HAVE_tablejump */