1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "tm_p.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45
46 #define CEIL(x,y) (((x) + (y) - 1) / (y))
47
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
50
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
53
54 #ifdef PUSH_ROUNDING
55
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first */
58 #endif
59
60 #endif
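/* For example (an illustrative reading of the test above, not a new rule):
   a typical target defines STACK_GROWS_DOWNWARD but not ARGS_GROW_DOWNWARD,
   so the two defined() values differ, PUSH_ARGS_REVERSED gets defined, and
   arguments are pushed from last to first so that the first argument ends
   up nearest the stack pointer.  */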
61
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
65 #else
66 #define STACK_PUSH_CODE PRE_INC
67 #endif
68 #endif
69
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
73 #endif
74
75 /* If this is nonzero, we do not bother generating VOLATILE
76 around volatile memory references, and we are willing to
77 output indirect addresses. If cse is to follow, we reject
78 indirect addresses so a useful potential cse is generated;
79 if it is used only once, instruction combination will produce
80 the same indirect address eventually. */
81 int cse_not_expected;
82
83 /* Nonzero to generate code for all the subroutines within an
84 expression before generating the upper levels of the expression.
85 Nowadays this is never zero. */
86 int do_preexpand_calls = 1;
87
88 /* Don't check memory usage, since code is being emitted to check a memory
89 usage. Used when current_function_check_memory_usage is true, to avoid
90 infinite recursion. */
91 static int in_check_memory_usage;
92
93 /* This structure is used by move_by_pieces to describe the move to
94 be performed. */
95 struct move_by_pieces
96 {
97 rtx to;
98 rtx to_addr;
99 int autinc_to;
100 int explicit_inc_to;
101 int to_struct;
102 int to_readonly;
103 rtx from;
104 rtx from_addr;
105 int autinc_from;
106 int explicit_inc_from;
107 int from_struct;
108 int from_readonly;
109 int len;
110 int offset;
111 int reverse;
112 };
113
114 /* This structure is used by clear_by_pieces to describe the clear to
115 be performed. */
116
117 struct clear_by_pieces
118 {
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 int to_struct;
124 int len;
125 int offset;
126 int reverse;
127 };
128
129 extern struct obstack permanent_obstack;
130
131 static rtx get_push_address PROTO ((int));
132
133 static rtx enqueue_insn PROTO((rtx, rtx));
134 static int move_by_pieces_ninsns PROTO((unsigned int, int));
135 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
136 struct move_by_pieces *));
137 static void clear_by_pieces PROTO((rtx, int, int));
138 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...),
139 enum machine_mode,
140 struct clear_by_pieces *));
141 static int is_zeros_p PROTO((tree));
142 static int mostly_zeros_p PROTO((tree));
143 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
144 tree, tree, int, int));
145 static void store_constructor PROTO((tree, rtx, int, int));
146 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
147 enum machine_mode, int, int,
148 int, int));
149 static enum memory_use_mode
150 get_memory_usage_from_modifier PROTO((enum expand_modifier));
151 static tree save_noncopied_parts PROTO((tree, tree));
152 static tree init_noncopied_parts PROTO((tree, tree));
153 static int safe_from_p PROTO((rtx, tree, int));
154 static int fixed_type_p PROTO((tree));
155 static rtx var_rtx PROTO((tree));
156 static rtx expand_increment PROTO((tree, int, int));
157 static void preexpand_calls PROTO((tree));
158 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
159 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
160 static void do_compare_and_jump PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
161 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
162
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
166
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
169
170 /* If a memory-to-memory move would take MOVE_RATIO or more simple
171 move-instruction sequences, we will do a movstr or libcall instead. */
172
173 #ifndef MOVE_RATIO
174 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
175 #define MOVE_RATIO 2
176 #else
177 /* If we are optimizing for space (-Os), cut down the default move ratio.  */
178 #define MOVE_RATIO (optimize_size ? 3 : 15)
179 #endif
180 #endif
181
182 /* This macro is used to determine whether move_by_pieces should be called
183 to perform a structure copy. */
184 #ifndef MOVE_BY_PIECES_P
185 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
186 (SIZE, ALIGN) < MOVE_RATIO)
187 #endif
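/* To illustrate the two macros above (an example only, assuming a 32-bit
   target with no movstrM patterns and not optimizing for size): a 32-byte
   word-aligned copy takes 8 SImode moves, so MOVE_BY_PIECES_P is true and
   the copy is expanded inline; a 256-byte copy would take 64 moves, which
   is not less than the default MOVE_RATIO of 15, so emit_block_move falls
   back to a library call (or a movstrM pattern on targets that have one).  */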
188
189 /* This array records the insn_code of insns to perform block moves. */
190 enum insn_code movstr_optab[NUM_MACHINE_MODES];
191
192 /* This array records the insn_code of insns to perform block clears. */
193 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
194
195 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
196
197 #ifndef SLOW_UNALIGNED_ACCESS
198 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
199 #endif
200 \f
201 /* This is run once per compilation to set up which modes can be used
202 directly in memory and to initialize the block move optab. */
203
204 void
205 init_expr_once ()
206 {
207 rtx insn, pat;
208 enum machine_mode mode;
209 int num_clobbers;
210 rtx mem, mem1;
211 char *free_point;
212
213 start_sequence ();
214
215 /* Since we are on the permanent obstack, we must be sure we save this
216 spot AFTER we call start_sequence, since it will reuse the rtl it
217 makes. */
218 free_point = (char *) oballoc (0);
219
220 /* Try indexing by frame ptr and try by stack ptr.
221 It is known that on the Convex the stack ptr isn't a valid index.
222 With luck, one or the other is valid on any machine. */
223 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
224 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
225
226 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
227 pat = PATTERN (insn);
228
229 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
230 mode = (enum machine_mode) ((int) mode + 1))
231 {
232 int regno;
233 rtx reg;
234
235 direct_load[(int) mode] = direct_store[(int) mode] = 0;
236 PUT_MODE (mem, mode);
237 PUT_MODE (mem1, mode);
238
239 /* See if there is some register that can be used in this mode and
240 directly loaded or stored from memory. */
241
242 if (mode != VOIDmode && mode != BLKmode)
243 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
244 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
245 regno++)
246 {
247 if (! HARD_REGNO_MODE_OK (regno, mode))
248 continue;
249
250 reg = gen_rtx_REG (mode, regno);
251
252 SET_SRC (pat) = mem;
253 SET_DEST (pat) = reg;
254 if (recog (pat, insn, &num_clobbers) >= 0)
255 direct_load[(int) mode] = 1;
256
257 SET_SRC (pat) = mem1;
258 SET_DEST (pat) = reg;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_load[(int) mode] = 1;
261
262 SET_SRC (pat) = reg;
263 SET_DEST (pat) = mem;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_store[(int) mode] = 1;
266
267 SET_SRC (pat) = reg;
268 SET_DEST (pat) = mem1;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_store[(int) mode] = 1;
271 }
272 }
273
274 end_sequence ();
275 obfree (free_point);
276 }
277
278 /* This is run at the start of compiling a function. */
279
280 void
281 init_expr ()
282 {
283 current_function->expr
284 = (struct expr_status *) xmalloc (sizeof (struct expr_status));
285
286 pending_chain = 0;
287 pending_stack_adjust = 0;
288 inhibit_defer_pop = 0;
289 saveregs_value = 0;
290 apply_args_value = 0;
291 forced_labels = 0;
292 }
293
294 void
295 mark_expr_status (p)
296 struct expr_status *p;
297 {
298 if (p == NULL)
299 return;
300
301 ggc_mark_rtx (p->x_saveregs_value);
302 ggc_mark_rtx (p->x_apply_args_value);
303 ggc_mark_rtx (p->x_forced_labels);
304 }
305
306 void
307 free_expr_status (f)
308 struct function *f;
309 {
310 free (f->expr);
311 f->expr = NULL;
312 }
313
314 /* Small sanity check that the queue is empty at the end of a function. */
315 void
316 finish_expr_for_function ()
317 {
318 if (pending_chain)
319 abort ();
320 }
321 \f
322 /* Manage the queue of increment instructions to be output
323 for POSTINCREMENT_EXPR expressions, etc. */
324
325 /* Queue up to increment (or change) VAR later. BODY says how:
326 BODY should be the same thing you would pass to emit_insn
327 to increment right away. It will go to emit_insn later on.
328
329 The value is a QUEUED expression to be used in place of VAR
330 where you want to guarantee the pre-incrementation value of VAR. */
331
332 static rtx
333 enqueue_insn (var, body)
334 rtx var, body;
335 {
336 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
337 body, pending_chain);
338 return pending_chain;
339 }
340
341 /* Use protect_from_queue to convert a QUEUED expression
342 into something that you can put immediately into an instruction.
343 If the queued incrementation has not happened yet,
344 protect_from_queue returns the variable itself.
345 If the incrementation has happened, protect_from_queue returns a temp
346 that contains a copy of the old value of the variable.
347
348 Any time an rtx which might possibly be a QUEUED is to be put
349 into an instruction, it must be passed through protect_from_queue first.
350 QUEUED expressions are not meaningful in instructions.
351
352 Do not pass a value through protect_from_queue and then hold
353 on to it for a while before putting it in an instruction!
354 If the queue is flushed in between, incorrect code will result. */
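/* An illustrative sketch of the intended pattern (the names VAR, INC_BODY
   and TARGET are hypothetical, not from this file):

	rtx q = enqueue_insn (var, inc_body);	    queue "var += 1"
	rtx old = protect_from_queue (q, 0);	    pre-increment value
	emit_insn (gen_move_insn (target, old));    use it in an insn now
	emit_queue ();				    the queued increment lands here

   OLD must be consumed by an insn before anything else can flush the
   queue, for the reason given above.  */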
355
356 rtx
357 protect_from_queue (x, modify)
358 register rtx x;
359 int modify;
360 {
361 register RTX_CODE code = GET_CODE (x);
362
363 #if 0 /* A QUEUED can hang around after the queue is forced out. */
364 /* Shortcut for most common case. */
365 if (pending_chain == 0)
366 return x;
367 #endif
368
369 if (code != QUEUED)
370 {
371 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
372 use of autoincrement. Make a copy of the contents of the memory
373 location rather than a copy of the address, but not if the value is
374 of mode BLKmode. Don't modify X in place since it might be
375 shared. */
376 if (code == MEM && GET_MODE (x) != BLKmode
377 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
378 {
379 register rtx y = XEXP (x, 0);
380 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
381
382 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
383 MEM_COPY_ATTRIBUTES (new, x);
384 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
385
386 if (QUEUED_INSN (y))
387 {
388 register rtx temp = gen_reg_rtx (GET_MODE (new));
389 emit_insn_before (gen_move_insn (temp, new),
390 QUEUED_INSN (y));
391 return temp;
392 }
393 return new;
394 }
395 /* Otherwise, recursively protect the subexpressions of all
396 the kinds of rtx's that can contain a QUEUED. */
397 if (code == MEM)
398 {
399 rtx tem = protect_from_queue (XEXP (x, 0), 0);
400 if (tem != XEXP (x, 0))
401 {
402 x = copy_rtx (x);
403 XEXP (x, 0) = tem;
404 }
405 }
406 else if (code == PLUS || code == MULT)
407 {
408 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
409 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
410 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
411 {
412 x = copy_rtx (x);
413 XEXP (x, 0) = new0;
414 XEXP (x, 1) = new1;
415 }
416 }
417 return x;
418 }
419 /* If the increment has not happened, use the variable itself. */
420 if (QUEUED_INSN (x) == 0)
421 return QUEUED_VAR (x);
422 /* If the increment has happened and a pre-increment copy exists,
423 use that copy. */
424 if (QUEUED_COPY (x) != 0)
425 return QUEUED_COPY (x);
426 /* The increment has happened but we haven't set up a pre-increment copy.
427 Set one up now, and use it. */
428 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
429 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
430 QUEUED_INSN (x));
431 return QUEUED_COPY (x);
432 }
433
434 /* Return nonzero if X contains a QUEUED expression:
435 if it contains anything that will be altered by a queued increment.
436 We handle only combinations of MEM, PLUS, MINUS and MULT operators
437 since memory addresses generally contain only those. */
438
439 int
440 queued_subexp_p (x)
441 rtx x;
442 {
443 register enum rtx_code code = GET_CODE (x);
444 switch (code)
445 {
446 case QUEUED:
447 return 1;
448 case MEM:
449 return queued_subexp_p (XEXP (x, 0));
450 case MULT:
451 case PLUS:
452 case MINUS:
453 return (queued_subexp_p (XEXP (x, 0))
454 || queued_subexp_p (XEXP (x, 1)));
455 default:
456 return 0;
457 }
458 }
459
460 /* Perform all the pending incrementations. */
461
462 void
463 emit_queue ()
464 {
465 register rtx p;
466 while ((p = pending_chain))
467 {
468 rtx body = QUEUED_BODY (p);
469
470 if (GET_CODE (body) == SEQUENCE)
471 {
472 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
473 emit_insn (QUEUED_BODY (p));
474 }
475 else
476 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
477 pending_chain = QUEUED_NEXT (p);
478 }
479 }
480 \f
481 /* Copy data from FROM to TO, where the machine modes are not the same.
482 Both modes may be integer, or both may be floating.
483 UNSIGNEDP should be nonzero if FROM is an unsigned type.
484 This causes zero-extension instead of sign-extension. */
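/* For example (illustrative only; the variable names are hypothetical):

	rtx wide = gen_reg_rtx (SImode);
	convert_move (wide, qi_reg, 0);

   sign-extends a QImode register into an SImode pseudo, while passing
   UNSIGNEDP == 1 would zero-extend instead.  */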
485
486 void
487 convert_move (to, from, unsignedp)
488 register rtx to, from;
489 int unsignedp;
490 {
491 enum machine_mode to_mode = GET_MODE (to);
492 enum machine_mode from_mode = GET_MODE (from);
493 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
494 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
495 enum insn_code code;
496 rtx libcall;
497
498 /* rtx code for making an equivalent value. */
499 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
500
501 to = protect_from_queue (to, 1);
502 from = protect_from_queue (from, 0);
503
504 if (to_real != from_real)
505 abort ();
506
507 /* If FROM is a SUBREG that indicates that we have already done at least
508 the required extension, strip it. We don't handle such SUBREGs as
509 TO here. */
510
511 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
512 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
513 >= GET_MODE_SIZE (to_mode))
514 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
515 from = gen_lowpart (to_mode, from), from_mode = to_mode;
516
517 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
518 abort ();
519
520 if (to_mode == from_mode
521 || (from_mode == VOIDmode && CONSTANT_P (from)))
522 {
523 emit_move_insn (to, from);
524 return;
525 }
526
527 if (to_real)
528 {
529 rtx value;
530
531 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
532 {
533 /* Try converting directly if the insn is supported. */
534 if ((code = can_extend_p (to_mode, from_mode, 0))
535 != CODE_FOR_nothing)
536 {
537 emit_unop_insn (code, to, from, UNKNOWN);
538 return;
539 }
540 }
541
542 #ifdef HAVE_trunchfqf2
543 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
544 {
545 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
546 return;
547 }
548 #endif
549 #ifdef HAVE_trunctqfqf2
550 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
551 {
552 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
553 return;
554 }
555 #endif
556 #ifdef HAVE_truncsfqf2
557 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
558 {
559 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
560 return;
561 }
562 #endif
563 #ifdef HAVE_truncdfqf2
564 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
565 {
566 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
567 return;
568 }
569 #endif
570 #ifdef HAVE_truncxfqf2
571 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
572 {
573 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
574 return;
575 }
576 #endif
577 #ifdef HAVE_trunctfqf2
578 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
579 {
580 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
581 return;
582 }
583 #endif
584
585 #ifdef HAVE_trunctqfhf2
586 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
587 {
588 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
589 return;
590 }
591 #endif
592 #ifdef HAVE_truncsfhf2
593 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
594 {
595 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
596 return;
597 }
598 #endif
599 #ifdef HAVE_truncdfhf2
600 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
601 {
602 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
603 return;
604 }
605 #endif
606 #ifdef HAVE_truncxfhf2
607 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
608 {
609 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
610 return;
611 }
612 #endif
613 #ifdef HAVE_trunctfhf2
614 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
615 {
616 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
617 return;
618 }
619 #endif
620
621 #ifdef HAVE_truncsftqf2
622 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
623 {
624 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
625 return;
626 }
627 #endif
628 #ifdef HAVE_truncdftqf2
629 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
630 {
631 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
632 return;
633 }
634 #endif
635 #ifdef HAVE_truncxftqf2
636 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
637 {
638 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
639 return;
640 }
641 #endif
642 #ifdef HAVE_trunctftqf2
643 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
644 {
645 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
646 return;
647 }
648 #endif
649
650 #ifdef HAVE_truncdfsf2
651 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
652 {
653 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
654 return;
655 }
656 #endif
657 #ifdef HAVE_truncxfsf2
658 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
659 {
660 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
661 return;
662 }
663 #endif
664 #ifdef HAVE_trunctfsf2
665 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
666 {
667 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
668 return;
669 }
670 #endif
671 #ifdef HAVE_truncxfdf2
672 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
673 {
674 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
675 return;
676 }
677 #endif
678 #ifdef HAVE_trunctfdf2
679 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
680 {
681 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
682 return;
683 }
684 #endif
685
686 libcall = (rtx) 0;
687 switch (from_mode)
688 {
689 case SFmode:
690 switch (to_mode)
691 {
692 case DFmode:
693 libcall = extendsfdf2_libfunc;
694 break;
695
696 case XFmode:
697 libcall = extendsfxf2_libfunc;
698 break;
699
700 case TFmode:
701 libcall = extendsftf2_libfunc;
702 break;
703
704 default:
705 break;
706 }
707 break;
708
709 case DFmode:
710 switch (to_mode)
711 {
712 case SFmode:
713 libcall = truncdfsf2_libfunc;
714 break;
715
716 case XFmode:
717 libcall = extenddfxf2_libfunc;
718 break;
719
720 case TFmode:
721 libcall = extenddftf2_libfunc;
722 break;
723
724 default:
725 break;
726 }
727 break;
728
729 case XFmode:
730 switch (to_mode)
731 {
732 case SFmode:
733 libcall = truncxfsf2_libfunc;
734 break;
735
736 case DFmode:
737 libcall = truncxfdf2_libfunc;
738 break;
739
740 default:
741 break;
742 }
743 break;
744
745 case TFmode:
746 switch (to_mode)
747 {
748 case SFmode:
749 libcall = trunctfsf2_libfunc;
750 break;
751
752 case DFmode:
753 libcall = trunctfdf2_libfunc;
754 break;
755
756 default:
757 break;
758 }
759 break;
760
761 default:
762 break;
763 }
764
765 if (libcall == (rtx) 0)
766 /* This conversion is not implemented yet. */
767 abort ();
768
769 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
770 1, from, from_mode);
771 emit_move_insn (to, value);
772 return;
773 }
774
775 /* Now both modes are integers. */
776
777 /* Handle expanding beyond a word. */
778 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
779 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
780 {
781 rtx insns;
782 rtx lowpart;
783 rtx fill_value;
784 rtx lowfrom;
785 int i;
786 enum machine_mode lowpart_mode;
787 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
788
789 /* Try converting directly if the insn is supported. */
790 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
791 != CODE_FOR_nothing)
792 {
793 /* If FROM is a SUBREG, put it into a register. Do this
794 so that we always generate the same set of insns for
795 better cse'ing; if an intermediate assignment occurred,
796 we won't be doing the operation directly on the SUBREG. */
797 if (optimize > 0 && GET_CODE (from) == SUBREG)
798 from = force_reg (from_mode, from);
799 emit_unop_insn (code, to, from, equiv_code);
800 return;
801 }
802 /* Next, try converting via full word. */
803 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
804 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
805 != CODE_FOR_nothing))
806 {
807 if (GET_CODE (to) == REG)
808 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
809 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
810 emit_unop_insn (code, to,
811 gen_lowpart (word_mode, to), equiv_code);
812 return;
813 }
814
815 /* No special multiword conversion insn; do it by hand. */
816 start_sequence ();
817
818 /* Since we will turn this into a no conflict block, we must ensure
819 that the source does not overlap the target. */
820
821 if (reg_overlap_mentioned_p (to, from))
822 from = force_reg (from_mode, from);
823
824 /* Get a copy of FROM widened to a word, if necessary. */
825 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
826 lowpart_mode = word_mode;
827 else
828 lowpart_mode = from_mode;
829
830 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
831
832 lowpart = gen_lowpart (lowpart_mode, to);
833 emit_move_insn (lowpart, lowfrom);
834
835 /* Compute the value to put in each remaining word. */
836 if (unsignedp)
837 fill_value = const0_rtx;
838 else
839 {
840 #ifdef HAVE_slt
841 if (HAVE_slt
842 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
843 && STORE_FLAG_VALUE == -1)
844 {
845 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
846 lowpart_mode, 0, 0);
847 fill_value = gen_reg_rtx (word_mode);
848 emit_insn (gen_slt (fill_value));
849 }
850 else
851 #endif
852 {
853 fill_value
854 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
855 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
856 NULL_RTX, 0);
857 fill_value = convert_to_mode (word_mode, fill_value, 1);
858 }
859 }
860
861 /* Fill the remaining words. */
862 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
863 {
864 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
865 rtx subword = operand_subword (to, index, 1, to_mode);
866
867 if (subword == 0)
868 abort ();
869
870 if (fill_value != subword)
871 emit_move_insn (subword, fill_value);
872 }
873
874 insns = get_insns ();
875 end_sequence ();
876
877 emit_no_conflict_block (insns, to, from, NULL_RTX,
878 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
879 return;
880 }
881
882 /* Truncating multi-word to a word or less. */
883 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
884 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
885 {
886 if (!((GET_CODE (from) == MEM
887 && ! MEM_VOLATILE_P (from)
888 && direct_load[(int) to_mode]
889 && ! mode_dependent_address_p (XEXP (from, 0)))
890 || GET_CODE (from) == REG
891 || GET_CODE (from) == SUBREG))
892 from = force_reg (from_mode, from);
893 convert_move (to, gen_lowpart (word_mode, from), 0);
894 return;
895 }
896
897 /* Handle pointer conversion */ /* SPEE 900220 */
898 if (to_mode == PQImode)
899 {
900 if (from_mode != QImode)
901 from = convert_to_mode (QImode, from, unsignedp);
902
903 #ifdef HAVE_truncqipqi2
904 if (HAVE_truncqipqi2)
905 {
906 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
907 return;
908 }
909 #endif /* HAVE_truncqipqi2 */
910 abort ();
911 }
912
913 if (from_mode == PQImode)
914 {
915 if (to_mode != QImode)
916 {
917 from = convert_to_mode (QImode, from, unsignedp);
918 from_mode = QImode;
919 }
920 else
921 {
922 #ifdef HAVE_extendpqiqi2
923 if (HAVE_extendpqiqi2)
924 {
925 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
926 return;
927 }
928 #endif /* HAVE_extendpqiqi2 */
929 abort ();
930 }
931 }
932
933 if (to_mode == PSImode)
934 {
935 if (from_mode != SImode)
936 from = convert_to_mode (SImode, from, unsignedp);
937
938 #ifdef HAVE_truncsipsi2
939 if (HAVE_truncsipsi2)
940 {
941 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
942 return;
943 }
944 #endif /* HAVE_truncsipsi2 */
945 abort ();
946 }
947
948 if (from_mode == PSImode)
949 {
950 if (to_mode != SImode)
951 {
952 from = convert_to_mode (SImode, from, unsignedp);
953 from_mode = SImode;
954 }
955 else
956 {
957 #ifdef HAVE_extendpsisi2
958 if (HAVE_extendpsisi2)
959 {
960 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
961 return;
962 }
963 #endif /* HAVE_extendpsisi2 */
964 abort ();
965 }
966 }
967
968 if (to_mode == PDImode)
969 {
970 if (from_mode != DImode)
971 from = convert_to_mode (DImode, from, unsignedp);
972
973 #ifdef HAVE_truncdipdi2
974 if (HAVE_truncdipdi2)
975 {
976 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
977 return;
978 }
979 #endif /* HAVE_truncdipdi2 */
980 abort ();
981 }
982
983 if (from_mode == PDImode)
984 {
985 if (to_mode != DImode)
986 {
987 from = convert_to_mode (DImode, from, unsignedp);
988 from_mode = DImode;
989 }
990 else
991 {
992 #ifdef HAVE_extendpdidi2
993 if (HAVE_extendpdidi2)
994 {
995 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
996 return;
997 }
998 #endif /* HAVE_extendpdidi2 */
999 abort ();
1000 }
1001 }
1002
1003 /* Now follow all the conversions between integers
1004 no more than a word long. */
1005
1006 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1007 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1008 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1009 GET_MODE_BITSIZE (from_mode)))
1010 {
1011 if (!((GET_CODE (from) == MEM
1012 && ! MEM_VOLATILE_P (from)
1013 && direct_load[(int) to_mode]
1014 && ! mode_dependent_address_p (XEXP (from, 0)))
1015 || GET_CODE (from) == REG
1016 || GET_CODE (from) == SUBREG))
1017 from = force_reg (from_mode, from);
1018 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1019 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1020 from = copy_to_reg (from);
1021 emit_move_insn (to, gen_lowpart (to_mode, from));
1022 return;
1023 }
1024
1025 /* Handle extension. */
1026 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1027 {
1028 /* Convert directly if that works. */
1029 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1030 != CODE_FOR_nothing)
1031 {
1032 emit_unop_insn (code, to, from, equiv_code);
1033 return;
1034 }
1035 else
1036 {
1037 enum machine_mode intermediate;
1038 rtx tmp;
1039 tree shift_amount;
1040
1041 /* Search for a mode to convert via. */
1042 for (intermediate = from_mode; intermediate != VOIDmode;
1043 intermediate = GET_MODE_WIDER_MODE (intermediate))
1044 if (((can_extend_p (to_mode, intermediate, unsignedp)
1045 != CODE_FOR_nothing)
1046 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1047 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1048 GET_MODE_BITSIZE (intermediate))))
1049 && (can_extend_p (intermediate, from_mode, unsignedp)
1050 != CODE_FOR_nothing))
1051 {
1052 convert_move (to, convert_to_mode (intermediate, from,
1053 unsignedp), unsignedp);
1054 return;
1055 }
1056
1057 /* No suitable intermediate mode.
1058 Generate what we need with shifts. */
1059 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1060 - GET_MODE_BITSIZE (from_mode), 0);
1061 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1062 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1063 to, unsignedp);
1064 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1065 to, unsignedp);
1066 if (tmp != to)
1067 emit_move_insn (to, tmp);
1068 return;
1069 }
1070 }
1071
1072 /* Support special truncate insns for certain modes. */
1073
1074 if (from_mode == DImode && to_mode == SImode)
1075 {
1076 #ifdef HAVE_truncdisi2
1077 if (HAVE_truncdisi2)
1078 {
1079 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1080 return;
1081 }
1082 #endif
1083 convert_move (to, force_reg (from_mode, from), unsignedp);
1084 return;
1085 }
1086
1087 if (from_mode == DImode && to_mode == HImode)
1088 {
1089 #ifdef HAVE_truncdihi2
1090 if (HAVE_truncdihi2)
1091 {
1092 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1093 return;
1094 }
1095 #endif
1096 convert_move (to, force_reg (from_mode, from), unsignedp);
1097 return;
1098 }
1099
1100 if (from_mode == DImode && to_mode == QImode)
1101 {
1102 #ifdef HAVE_truncdiqi2
1103 if (HAVE_truncdiqi2)
1104 {
1105 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1106 return;
1107 }
1108 #endif
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1110 return;
1111 }
1112
1113 if (from_mode == SImode && to_mode == HImode)
1114 {
1115 #ifdef HAVE_truncsihi2
1116 if (HAVE_truncsihi2)
1117 {
1118 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1119 return;
1120 }
1121 #endif
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 return;
1124 }
1125
1126 if (from_mode == SImode && to_mode == QImode)
1127 {
1128 #ifdef HAVE_truncsiqi2
1129 if (HAVE_truncsiqi2)
1130 {
1131 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1132 return;
1133 }
1134 #endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1137 }
1138
1139 if (from_mode == HImode && to_mode == QImode)
1140 {
1141 #ifdef HAVE_trunchiqi2
1142 if (HAVE_trunchiqi2)
1143 {
1144 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1145 return;
1146 }
1147 #endif
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 return;
1150 }
1151
1152 if (from_mode == TImode && to_mode == DImode)
1153 {
1154 #ifdef HAVE_trunctidi2
1155 if (HAVE_trunctidi2)
1156 {
1157 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1158 return;
1159 }
1160 #endif
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 return;
1163 }
1164
1165 if (from_mode == TImode && to_mode == SImode)
1166 {
1167 #ifdef HAVE_trunctisi2
1168 if (HAVE_trunctisi2)
1169 {
1170 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1171 return;
1172 }
1173 #endif
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 return;
1176 }
1177
1178 if (from_mode == TImode && to_mode == HImode)
1179 {
1180 #ifdef HAVE_trunctihi2
1181 if (HAVE_trunctihi2)
1182 {
1183 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1184 return;
1185 }
1186 #endif
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 return;
1189 }
1190
1191 if (from_mode == TImode && to_mode == QImode)
1192 {
1193 #ifdef HAVE_trunctiqi2
1194 if (HAVE_trunctiqi2)
1195 {
1196 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1197 return;
1198 }
1199 #endif
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 return;
1202 }
1203
1204 /* Handle truncation of volatile memrefs, and so on;
1205 the things that couldn't be truncated directly,
1206 and for which there was no special instruction. */
1207 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1208 {
1209 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1210 emit_move_insn (to, temp);
1211 return;
1212 }
1213
1214 /* Mode combination is not recognized. */
1215 abort ();
1216 }
1217
1218 /* Return an rtx for a value that would result
1219 from converting X to mode MODE.
1220 Both X and MODE may be floating, or both integer.
1221 UNSIGNEDP is nonzero if X is an unsigned value.
1222 This can be done by referring to a part of X in place
1223 or by copying to a new temporary with conversion.
1224
1225 This function *must not* call protect_from_queue
1226 except when putting X into an insn (in which case convert_move does it). */
1227
1228 rtx
1229 convert_to_mode (mode, x, unsignedp)
1230 enum machine_mode mode;
1231 rtx x;
1232 int unsignedp;
1233 {
1234 return convert_modes (mode, VOIDmode, x, unsignedp);
1235 }
1236
1237 /* Return an rtx for a value that would result
1238 from converting X from mode OLDMODE to mode MODE.
1239 Both modes may be floating, or both integer.
1240 UNSIGNEDP is nonzero if X is an unsigned value.
1241
1242 This can be done by referring to a part of X in place
1243 or by copying to a new temporary with conversion.
1244
1245 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1246
1247 This function *must not* call protect_from_queue
1248 except when putting X into an insn (in which case convert_move does it). */
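/* As an illustrative call (not from the original file): if X is a
   CONST_INT known to hold a 16-bit unsigned value,

	rtx si = convert_modes (SImode, HImode, x, 1);

   yields an SImode form of it, either by reusing X via gen_lowpart or by
   converting to a new temporary, as described above.  */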
1249
1250 rtx
1251 convert_modes (mode, oldmode, x, unsignedp)
1252 enum machine_mode mode, oldmode;
1253 rtx x;
1254 int unsignedp;
1255 {
1256 register rtx temp;
1257
1258 /* If FROM is a SUBREG that indicates that we have already done at least
1259 the required extension, strip it. */
1260
1261 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1262 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1263 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1264 x = gen_lowpart (mode, x);
1265
1266 if (GET_MODE (x) != VOIDmode)
1267 oldmode = GET_MODE (x);
1268
1269 if (mode == oldmode)
1270 return x;
1271
1272 /* There is one case that we must handle specially: If we are converting
1273 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1274 we are to interpret the constant as unsigned, gen_lowpart will do
1275 the wrong if the constant appears negative. What we want to do is
1276 make the high-order word of the constant zero, not all ones. */
1277
1278 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1279 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1280 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1281 {
1282 HOST_WIDE_INT val = INTVAL (x);
1283
1284 if (oldmode != VOIDmode
1285 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1286 {
1287 int width = GET_MODE_BITSIZE (oldmode);
1288
1289 /* We need to zero extend VAL. */
1290 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1291 }
1292
1293 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1294 }
1295
1296 /* We can do this with a gen_lowpart if both desired and current modes
1297 are integer, and this is either a constant integer, a register, or a
1298 non-volatile MEM. Except for the constant case where MODE is no
1299 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1300
1301 if ((GET_CODE (x) == CONST_INT
1302 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1303 || (GET_MODE_CLASS (mode) == MODE_INT
1304 && GET_MODE_CLASS (oldmode) == MODE_INT
1305 && (GET_CODE (x) == CONST_DOUBLE
1306 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1307 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1308 && direct_load[(int) mode])
1309 || (GET_CODE (x) == REG
1310 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1311 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1312 {
1313 /* ?? If we don't know OLDMODE, we have to assume here that
1314 X does not need sign- or zero-extension. This may not be
1315 the case, but it's the best we can do. */
1316 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1317 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1318 {
1319 HOST_WIDE_INT val = INTVAL (x);
1320 int width = GET_MODE_BITSIZE (oldmode);
1321
1322 /* We must sign or zero-extend in this case. Start by
1323 zero-extending, then sign extend if we need to. */
1324 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1325 if (! unsignedp
1326 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1327 val |= (HOST_WIDE_INT) (-1) << width;
1328
1329 return GEN_INT (val);
1330 }
1331
1332 return gen_lowpart (mode, x);
1333 }
1334
1335 temp = gen_reg_rtx (mode);
1336 convert_move (temp, x, unsignedp);
1337 return temp;
1338 }
1339 \f
1340
1341 /* This macro determines the largest unit size that
1342    move_by_pieces can use.  */
1343
1344 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1345 move efficiently, as opposed to MOVE_MAX which is the maximum
1346 number of bytes we can move with a single instruction. */
1347
1348 #ifndef MOVE_MAX_PIECES
1349 #define MOVE_MAX_PIECES MOVE_MAX
1350 #endif
1351
1352 /* Generate several move instructions to copy LEN bytes
1353 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1354 The caller must pass FROM and TO
1355 through protect_from_queue before calling.
1356 ALIGN (in bytes) is maximum alignment we can assume. */
1357
1358 void
1359 move_by_pieces (to, from, len, align)
1360 rtx to, from;
1361 int len, align;
1362 {
1363 struct move_by_pieces data;
1364 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1365 int max_size = MOVE_MAX_PIECES + 1;
1366 enum machine_mode mode = VOIDmode, tmode;
1367 enum insn_code icode;
1368
1369 data.offset = 0;
1370 data.to_addr = to_addr;
1371 data.from_addr = from_addr;
1372 data.to = to;
1373 data.from = from;
1374 data.autinc_to
1375 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1376 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1377 data.autinc_from
1378 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1379 || GET_CODE (from_addr) == POST_INC
1380 || GET_CODE (from_addr) == POST_DEC);
1381
1382 data.explicit_inc_from = 0;
1383 data.explicit_inc_to = 0;
1384 data.reverse
1385 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1386 if (data.reverse) data.offset = len;
1387 data.len = len;
1388
1389 data.to_struct = MEM_IN_STRUCT_P (to);
1390 data.from_struct = MEM_IN_STRUCT_P (from);
1391 data.to_readonly = RTX_UNCHANGING_P (to);
1392 data.from_readonly = RTX_UNCHANGING_P (from);
1393
1394 /* If copying requires more than two move insns,
1395 copy addresses to registers (to make displacements shorter)
1396 and use post-increment if available. */
1397 if (!(data.autinc_from && data.autinc_to)
1398 && move_by_pieces_ninsns (len, align) > 2)
1399 {
1400 /* Find the mode of the largest move... */
1401 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1402 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1403 if (GET_MODE_SIZE (tmode) < max_size)
1404 mode = tmode;
1405
1406 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1407 {
1408 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1409 data.autinc_from = 1;
1410 data.explicit_inc_from = -1;
1411 }
1412 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1413 {
1414 data.from_addr = copy_addr_to_reg (from_addr);
1415 data.autinc_from = 1;
1416 data.explicit_inc_from = 1;
1417 }
1418 if (!data.autinc_from && CONSTANT_P (from_addr))
1419 data.from_addr = copy_addr_to_reg (from_addr);
1420 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1421 {
1422 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1423 data.autinc_to = 1;
1424 data.explicit_inc_to = -1;
1425 }
1426 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1427 {
1428 data.to_addr = copy_addr_to_reg (to_addr);
1429 data.autinc_to = 1;
1430 data.explicit_inc_to = 1;
1431 }
1432 if (!data.autinc_to && CONSTANT_P (to_addr))
1433 data.to_addr = copy_addr_to_reg (to_addr);
1434 }
1435
1436 if (! SLOW_UNALIGNED_ACCESS
1437 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1438 align = MOVE_MAX;
1439
1440 /* First move what we can in the largest integer mode, then go to
1441 successively smaller modes. */
1442
1443 while (max_size > 1)
1444 {
1445 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1446 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1447 if (GET_MODE_SIZE (tmode) < max_size)
1448 mode = tmode;
1449
1450 if (mode == VOIDmode)
1451 break;
1452
1453 icode = mov_optab->handlers[(int) mode].insn_code;
1454 if (icode != CODE_FOR_nothing
1455 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1456 GET_MODE_SIZE (mode)))
1457 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1458
1459 max_size = GET_MODE_SIZE (mode);
1460 }
1461
1462 /* The code above should have handled everything. */
1463 if (data.len > 0)
1464 abort ();
1465 }
1466
1467 /* Return number of insns required to move L bytes by pieces.
1468 ALIGN (in bytes) is maximum alignment we can assume. */
1469
1470 static int
1471 move_by_pieces_ninsns (l, align)
1472 unsigned int l;
1473 int align;
1474 {
1475 register int n_insns = 0;
1476 int max_size = MOVE_MAX + 1;
1477
1478 if (! SLOW_UNALIGNED_ACCESS
1479 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1480 align = MOVE_MAX;
1481
1482 while (max_size > 1)
1483 {
1484 enum machine_mode mode = VOIDmode, tmode;
1485 enum insn_code icode;
1486
1487 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1488 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1489 if (GET_MODE_SIZE (tmode) < max_size)
1490 mode = tmode;
1491
1492 if (mode == VOIDmode)
1493 break;
1494
1495 icode = mov_optab->handlers[(int) mode].insn_code;
1496 if (icode != CODE_FOR_nothing
1497 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1498 GET_MODE_SIZE (mode)))
1499 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1500
1501 max_size = GET_MODE_SIZE (mode);
1502 }
1503
1504 return n_insns;
1505 }
1506
1507 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1508 with move instructions for mode MODE. GENFUN is the gen_... function
1509 to make a move insn for that mode. DATA has all the other info. */
1510
1511 static void
1512 move_by_pieces_1 (genfun, mode, data)
1513 rtx (*genfun) PROTO ((rtx, ...));
1514 enum machine_mode mode;
1515 struct move_by_pieces *data;
1516 {
1517 register int size = GET_MODE_SIZE (mode);
1518 register rtx to1, from1;
1519
1520 while (data->len >= size)
1521 {
1522 if (data->reverse) data->offset -= size;
1523
1524 to1 = (data->autinc_to
1525 ? gen_rtx_MEM (mode, data->to_addr)
1526 : copy_rtx (change_address (data->to, mode,
1527 plus_constant (data->to_addr,
1528 data->offset))));
1529 MEM_IN_STRUCT_P (to1) = data->to_struct;
1530 RTX_UNCHANGING_P (to1) = data->to_readonly;
1531
1532 from1
1533 = (data->autinc_from
1534 ? gen_rtx_MEM (mode, data->from_addr)
1535 : copy_rtx (change_address (data->from, mode,
1536 plus_constant (data->from_addr,
1537 data->offset))));
1538 MEM_IN_STRUCT_P (from1) = data->from_struct;
1539 RTX_UNCHANGING_P (from1) = data->from_readonly;
1540
1541 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1542 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1543 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1544 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1545
1546 emit_insn ((*genfun) (to1, from1));
1547 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1548 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1549 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1550 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1551
1552 if (! data->reverse) data->offset += size;
1553
1554 data->len -= size;
1555 }
1556 }
1557 \f
1558 /* Emit code to move a block Y to a block X.
1559 This may be done with string-move instructions,
1560 with multiple scalar move instructions, or with a library call.
1561
1562 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1563 with mode BLKmode.
1564 SIZE is an rtx that says how long they are.
1565 ALIGN is the maximum alignment we can assume they have,
1566 measured in bytes.
1567
1568 Return the address of the new block if memcpy is called and returns it,
1569 0 otherwise. */
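/* A typical call (illustrative; the operand names are hypothetical):

	rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
	rtx src = gen_rtx_MEM (BLKmode, src_addr);
	emit_block_move (dst, src, GEN_INT (40), 4);

   copies 40 bytes known to be 4-byte aligned, choosing among
   move_by_pieces, a movstrM pattern and a memcpy/bcopy call as
   described above.  */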
1570
1571 rtx
1572 emit_block_move (x, y, size, align)
1573 rtx x, y;
1574 rtx size;
1575 int align;
1576 {
1577 rtx retval = 0;
1578 #ifdef TARGET_MEM_FUNCTIONS
1579 static tree fn;
1580 tree call_expr, arg_list;
1581 #endif
1582
1583 if (GET_MODE (x) != BLKmode)
1584 abort ();
1585
1586 if (GET_MODE (y) != BLKmode)
1587 abort ();
1588
1589 x = protect_from_queue (x, 1);
1590 y = protect_from_queue (y, 0);
1591 size = protect_from_queue (size, 0);
1592
1593 if (GET_CODE (x) != MEM)
1594 abort ();
1595 if (GET_CODE (y) != MEM)
1596 abort ();
1597 if (size == 0)
1598 abort ();
1599
1600 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1601 move_by_pieces (x, y, INTVAL (size), align);
1602 else
1603 {
1604 /* Try the most limited insn first, because there's no point
1605 including more than one in the machine description unless
1606 the more limited one has some advantage. */
1607
1608 rtx opalign = GEN_INT (align);
1609 enum machine_mode mode;
1610
1611 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1612 mode = GET_MODE_WIDER_MODE (mode))
1613 {
1614 enum insn_code code = movstr_optab[(int) mode];
1615 insn_operand_predicate_fn pred;
1616
1617 if (code != CODE_FOR_nothing
1618 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1619 here because if SIZE is less than the mode mask, as it is
1620 returned by the macro, it will definitely be less than the
1621 actual mode mask. */
1622 && ((GET_CODE (size) == CONST_INT
1623 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1624 <= (GET_MODE_MASK (mode) >> 1)))
1625 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1626 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1627 || (*pred) (x, BLKmode))
1628 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1629 || (*pred) (y, BLKmode))
1630 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1631 || (*pred) (opalign, VOIDmode)))
1632 {
1633 rtx op2;
1634 rtx last = get_last_insn ();
1635 rtx pat;
1636
1637 op2 = convert_to_mode (mode, size, 1);
1638 pred = insn_data[(int) code].operand[2].predicate;
1639 if (pred != 0 && ! (*pred) (op2, mode))
1640 op2 = copy_to_mode_reg (mode, op2);
1641
1642 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1643 if (pat)
1644 {
1645 emit_insn (pat);
1646 return 0;
1647 }
1648 else
1649 delete_insns_since (last);
1650 }
1651 }
1652
1653 /* X, Y, or SIZE may have been passed through protect_from_queue.
1654
1655 It is unsafe to save the value generated by protect_from_queue
1656 and reuse it later. Consider what happens if emit_queue is
1657 called before the return value from protect_from_queue is used.
1658
1659 Expansion of the CALL_EXPR below will call emit_queue before
1660 we are finished emitting RTL for argument setup. So if we are
1661 not careful we could get the wrong value for an argument.
1662
1663 To avoid this problem we go ahead and emit code to copy X, Y &
1664 SIZE into new pseudos. We can then place those new pseudos
1665 into an RTL_EXPR and use them later, even after a call to
1666 emit_queue.
1667
1668 Note this is not strictly needed for library calls since they
1669 do not call emit_queue before loading their arguments. However,
1670 we may need to have library calls call emit_queue in the future
1671 since failing to do so could cause problems for targets which
1672 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1673 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1674 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1675
1676 #ifdef TARGET_MEM_FUNCTIONS
1677 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1678 #else
1679 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1680 TREE_UNSIGNED (integer_type_node));
1681 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1682 #endif
1683
1684 #ifdef TARGET_MEM_FUNCTIONS
1685 /* It is incorrect to use the libcall calling conventions to call
1686 memcpy in this context.
1687
1688 This could be a user call to memcpy and the user may wish to
1689 examine the return value from memcpy.
1690
1691 For targets where libcalls and normal calls have different conventions
1692 for returning pointers, we could end up generating incorrect code.
1693
1694 So instead of using a libcall sequence we build up a suitable
1695 CALL_EXPR and expand the call in the normal fashion. */
1696 if (fn == NULL_TREE)
1697 {
1698 tree fntype;
1699
1700 /* This was copied from except.c; I don't know if all this is
1701 necessary in this context or not. */
1702 fn = get_identifier ("memcpy");
1703 push_obstacks_nochange ();
1704 end_temporary_allocation ();
1705 fntype = build_pointer_type (void_type_node);
1706 fntype = build_function_type (fntype, NULL_TREE);
1707 fn = build_decl (FUNCTION_DECL, fn, fntype);
1708 ggc_add_tree_root (&fn, 1);
1709 DECL_EXTERNAL (fn) = 1;
1710 TREE_PUBLIC (fn) = 1;
1711 DECL_ARTIFICIAL (fn) = 1;
1712 make_decl_rtl (fn, NULL_PTR, 1);
1713 assemble_external (fn);
1714 pop_obstacks ();
1715 }
1716
1717 /* We need to make an argument list for the function call.
1718
1719 memcpy has three arguments, the first two are void * addresses and
1720 the last is a size_t byte count for the copy. */
1721 arg_list
1722 = build_tree_list (NULL_TREE,
1723 make_tree (build_pointer_type (void_type_node), x));
1724 TREE_CHAIN (arg_list)
1725 = build_tree_list (NULL_TREE,
1726 make_tree (build_pointer_type (void_type_node), y));
1727 TREE_CHAIN (TREE_CHAIN (arg_list))
1728 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1729 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1730
1731 /* Now we have to build up the CALL_EXPR itself. */
1732 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1733 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1734 call_expr, arg_list, NULL_TREE);
1735 TREE_SIDE_EFFECTS (call_expr) = 1;
1736
1737 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1738 #else
1739 emit_library_call (bcopy_libfunc, 0,
1740 VOIDmode, 3, y, Pmode, x, Pmode,
1741 convert_to_mode (TYPE_MODE (integer_type_node), size,
1742 TREE_UNSIGNED (integer_type_node)),
1743 TYPE_MODE (integer_type_node));
1744 #endif
1745 }
1746
1747 return retval;
1748 }
1749 \f
1750 /* Copy all or part of a value X into registers starting at REGNO.
1751 The number of registers to be filled is NREGS. */
1752
1753 void
1754 move_block_to_reg (regno, x, nregs, mode)
1755 int regno;
1756 rtx x;
1757 int nregs;
1758 enum machine_mode mode;
1759 {
1760 int i;
1761 #ifdef HAVE_load_multiple
1762 rtx pat;
1763 rtx last;
1764 #endif
1765
1766 if (nregs == 0)
1767 return;
1768
1769 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1770 x = validize_mem (force_const_mem (mode, x));
1771
1772 /* See if the machine can do this with a load multiple insn. */
1773 #ifdef HAVE_load_multiple
1774 if (HAVE_load_multiple)
1775 {
1776 last = get_last_insn ();
1777 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1778 GEN_INT (nregs));
1779 if (pat)
1780 {
1781 emit_insn (pat);
1782 return;
1783 }
1784 else
1785 delete_insns_since (last);
1786 }
1787 #endif
1788
1789 for (i = 0; i < nregs; i++)
1790 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1791 operand_subword_force (x, i, mode));
1792 }
1793
1794 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1795 The number of registers to be filled is NREGS. SIZE indicates the number
1796 of bytes in the object X. */
1797
1798
1799 void
1800 move_block_from_reg (regno, x, nregs, size)
1801 int regno;
1802 rtx x;
1803 int nregs;
1804 int size;
1805 {
1806 int i;
1807 #ifdef HAVE_store_multiple
1808 rtx pat;
1809 rtx last;
1810 #endif
1811 enum machine_mode mode;
1812
1813 /* If SIZE is that of a mode no bigger than a word, just use that
1814 mode's store operation. */
1815 if (size <= UNITS_PER_WORD
1816 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1817 {
1818 emit_move_insn (change_address (x, mode, NULL),
1819 gen_rtx_REG (mode, regno));
1820 return;
1821 }
1822
1823 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1824 to the left before storing to memory. Note that the previous test
1825 doesn't handle all cases (e.g. SIZE == 3). */
1826 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1827 {
1828 rtx tem = operand_subword (x, 0, 1, BLKmode);
1829 rtx shift;
1830
1831 if (tem == 0)
1832 abort ();
1833
1834 shift = expand_shift (LSHIFT_EXPR, word_mode,
1835 gen_rtx_REG (word_mode, regno),
1836 build_int_2 ((UNITS_PER_WORD - size)
1837 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1838 emit_move_insn (tem, shift);
1839 return;
1840 }
1841
1842 /* See if the machine can do this with a store multiple insn. */
1843 #ifdef HAVE_store_multiple
1844 if (HAVE_store_multiple)
1845 {
1846 last = get_last_insn ();
1847 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1848 GEN_INT (nregs));
1849 if (pat)
1850 {
1851 emit_insn (pat);
1852 return;
1853 }
1854 else
1855 delete_insns_since (last);
1856 }
1857 #endif
1858
1859 for (i = 0; i < nregs; i++)
1860 {
1861 rtx tem = operand_subword (x, i, 1, BLKmode);
1862
1863 if (tem == 0)
1864 abort ();
1865
1866 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1867 }
1868 }
1869
1870 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1871 registers represented by a PARALLEL. SSIZE represents the total size of
1872 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1873 SRC in bits. */
1874 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1875 the balance will be in what would be the low-order memory addresses, i.e.
1876 left justified for big endian, right justified for little endian. This
1877 happens to be true for the targets currently using this support. If this
1878 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1879 would be needed. */
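/* As an illustration (hypothetical registers and offsets): a structure
   returned in two SImode registers could be described by

	(parallel [(expr_list (reg:SI 3) (const_int 0))
		   (expr_list (reg:SI 4) (const_int 4))])

   and emit_group_load (that_parallel, mem, 8, 4) then loads regs 3 and 4
   from the first and second words of MEM.  */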
1880
1881 void
1882 emit_group_load (dst, orig_src, ssize, align)
1883 rtx dst, orig_src;
1884 int align, ssize;
1885 {
1886 rtx *tmps, src;
1887 int start, i;
1888
1889 if (GET_CODE (dst) != PARALLEL)
1890 abort ();
1891
1892 /* Check for a NULL entry, used to indicate that the parameter goes
1893 both on the stack and in registers. */
1894 if (XEXP (XVECEXP (dst, 0, 0), 0))
1895 start = 0;
1896 else
1897 start = 1;
1898
1899 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1900
1901 /* If we won't be loading directly from memory, protect the real source
1902 from strange tricks we might play. */
1903 src = orig_src;
1904 if (GET_CODE (src) != MEM)
1905 {
1906 src = gen_reg_rtx (GET_MODE (orig_src));
1907 emit_move_insn (src, orig_src);
1908 }
1909
1910 /* Process the pieces. */
1911 for (i = start; i < XVECLEN (dst, 0); i++)
1912 {
1913 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1914 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1915 int bytelen = GET_MODE_SIZE (mode);
1916 int shift = 0;
1917
1918 /* Handle trailing fragments that run over the size of the struct. */
1919 if (ssize >= 0 && bytepos + bytelen > ssize)
1920 {
1921 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1922 bytelen = ssize - bytepos;
1923 if (bytelen <= 0)
1924 abort();
1925 }
1926
1927 /* Optimize the access just a bit. */
1928 if (GET_CODE (src) == MEM
1929 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1930 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1931 && bytelen == GET_MODE_SIZE (mode))
1932 {
1933 tmps[i] = gen_reg_rtx (mode);
1934 emit_move_insn (tmps[i],
1935 change_address (src, mode,
1936 plus_constant (XEXP (src, 0),
1937 bytepos)));
1938 }
1939 else if (GET_CODE (src) == CONCAT)
1940 {
1941 if (bytepos == 0
1942 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1943 tmps[i] = XEXP (src, 0);
1944 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1945 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1946 tmps[i] = XEXP (src, 1);
1947 else
1948 abort ();
1949 }
1950 else
1951 {
1952 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1953 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1954 mode, mode, align, ssize);
1955 }
1956
1957 if (BYTES_BIG_ENDIAN && shift)
1958 {
1959 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1960 tmps[i], 0, OPTAB_WIDEN);
1961 }
1962 }
1963 emit_queue();
1964
1965 /* Copy the extracted pieces into the proper (probable) hard regs. */
1966 for (i = start; i < XVECLEN (dst, 0); i++)
1967 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1968 }
1969
1970 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1971 registers represented by a PARALLEL. SSIZE represents the total size of
1972 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
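/* Illustrative sketch, not part of the original sources: the mirror
   image of emit_group_load.  If SRC is a PARALLEL describing a value
   held in two registers, something like

       emit_group_store (dest_mem, src_parallel, 16, 8);

   copies the registers into pseudos and then stores each piece into
   DEST_MEM at its byte offset.  The names and sizes are hypothetical.  */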
1973
1974 void
1975 emit_group_store (orig_dst, src, ssize, align)
1976 rtx orig_dst, src;
1977 int ssize, align;
1978 {
1979 rtx *tmps, dst;
1980 int start, i;
1981
1982 if (GET_CODE (src) != PARALLEL)
1983 abort ();
1984
1985 /* Check for a NULL entry, used to indicate that the parameter goes
1986 both on the stack and in registers. */
1987 if (XEXP (XVECEXP (src, 0, 0), 0))
1988 start = 0;
1989 else
1990 start = 1;
1991
1992 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
1993
1994 /* Copy the (probable) hard regs into pseudos. */
1995 for (i = start; i < XVECLEN (src, 0); i++)
1996 {
1997 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1998 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1999 emit_move_insn (tmps[i], reg);
2000 }
2001 emit_queue();
2002
2003 /* If we won't be storing directly into memory, protect the real destination
2004 from strange tricks we might play. */
2005 dst = orig_dst;
2006 if (GET_CODE (dst) == PARALLEL)
2007 {
2008 rtx temp;
2009
2010 /* We can get a PARALLEL dst if there is a conditional expression in
2011 a return statement. In that case, the dst and src are the same,
2012 so no action is necessary. */
2013 if (rtx_equal_p (dst, src))
2014 return;
2015
2016 /* It is unclear if we can ever reach here, but we may as well handle
2017 it. Allocate a temporary, and split this into a store/load to/from
2018 the temporary. */
2019
2020 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2021 emit_group_store (temp, src, ssize, align);
2022 emit_group_load (dst, temp, ssize, align);
2023 return;
2024 }
2025 else if (GET_CODE (dst) != MEM)
2026 {
2027 dst = gen_reg_rtx (GET_MODE (orig_dst));
2028 /* Make life a bit easier for combine. */
2029 emit_move_insn (dst, const0_rtx);
2030 }
2031 else if (! MEM_IN_STRUCT_P (dst))
2032 {
2033 /* store_bit_field requires that memory operations have
2034 mem_in_struct_p set; we might not. */
2035
2036 dst = copy_rtx (orig_dst);
2037 MEM_SET_IN_STRUCT_P (dst, 1);
2038 }
2039
2040 /* Process the pieces. */
2041 for (i = start; i < XVECLEN (src, 0); i++)
2042 {
2043 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2044 enum machine_mode mode = GET_MODE (tmps[i]);
2045 int bytelen = GET_MODE_SIZE (mode);
2046
2047 /* Handle trailing fragments that run over the size of the struct. */
2048 if (ssize >= 0 && bytepos + bytelen > ssize)
2049 {
2050 if (BYTES_BIG_ENDIAN)
2051 {
2052 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2053 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2054 tmps[i], 0, OPTAB_WIDEN);
2055 }
2056 bytelen = ssize - bytepos;
2057 }
2058
2059 /* Optimize the access just a bit. */
2060 if (GET_CODE (dst) == MEM
2061 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2062 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2063 && bytelen == GET_MODE_SIZE (mode))
2064 {
2065 emit_move_insn (change_address (dst, mode,
2066 plus_constant (XEXP (dst, 0),
2067 bytepos)),
2068 tmps[i]);
2069 }
2070 else
2071 {
2072 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2073 mode, tmps[i], align, ssize);
2074 }
2075 }
2076 emit_queue();
2077
2078 /* Copy from the pseudo into the (probable) hard reg. */
2079 if (GET_CODE (dst) == REG)
2080 emit_move_insn (orig_dst, dst);
2081 }
2082
2083 /* Generate code to copy a BLKmode object of TYPE out of a
2084 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2085 is null, a stack temporary is created. TGTBLK is returned.
2086
2087 The primary purpose of this routine is to handle functions
2088 that return BLKmode structures in registers. Some machines
2089 (the PA for example) want to return all small structures
2090 in registers regardless of the structure's alignment.
2091 */
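/* Illustrative sketch, not part of the original sources: after a call
   whose BLKmode result arrives in registers, a hypothetical caller
   could spill it to memory with

       rtx blk = copy_blkmode_from_reg (NULL_RTX, hard_return_reg, type);

   which, because TGTBLK is null, allocates a stack temporary, copies
   the value into it a word at a time and returns that MEM.  */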
2092
2093 rtx
2094 copy_blkmode_from_reg (tgtblk, srcreg, type)
2095 rtx tgtblk;
2096 rtx srcreg;
2097 tree type;
2098 {
2099 int bytes = int_size_in_bytes (type);
2100 rtx src = NULL, dst = NULL;
2101 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2102 int bitpos, xbitpos, big_endian_correction = 0;
2103
2104 if (tgtblk == 0)
2105 {
2106 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2107 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2108 preserve_temp_slots (tgtblk);
2109 }
2110
2111 /* This code assumes srcreg is at least a full word. If it isn't,
2112 copy it into a new pseudo which is a full word. */
2113 if (GET_MODE (srcreg) != BLKmode
2114 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2115 srcreg = convert_to_mode (word_mode, srcreg,
2116 TREE_UNSIGNED (type));
2117
2118 /* Structures whose size is not a multiple of a word are aligned
2119 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2120 machine, this means we must skip the empty high order bytes when
2121 calculating the bit offset. */
2122 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2123 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2124 * BITS_PER_UNIT));
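    /* Illustrative arithmetic (assuming 4-byte, 32-bit words): a
       5-byte structure gives bytes % UNITS_PER_WORD == 1, so the
       correction is 32 - 1 * 8 == 24 bits.  */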
2125
2126 /* Copy the structure BITSIZE bits at a time.
2127
2128 We could probably emit more efficient code for machines
2129 which do not use strict alignment, but it doesn't seem
2130 worth the effort at the current time. */
2131 for (bitpos = 0, xbitpos = big_endian_correction;
2132 bitpos < bytes * BITS_PER_UNIT;
2133 bitpos += bitsize, xbitpos += bitsize)
2134 {
2135
2136 /* We need a new source operand each time xbitpos is on a
2137 word boundary and when xbitpos == big_endian_correction
2138 (the first time through). */
2139 if (xbitpos % BITS_PER_WORD == 0
2140 || xbitpos == big_endian_correction)
2141 src = operand_subword_force (srcreg,
2142 xbitpos / BITS_PER_WORD,
2143 BLKmode);
2144
2145 /* We need a new destination operand each time bitpos is on
2146 a word boundary. */
2147 if (bitpos % BITS_PER_WORD == 0)
2148 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2149
2150 /* Use xbitpos for the source extraction (right justified) and
2151 bitpos for the destination store (left justified). */
2152 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2153 extract_bit_field (src, bitsize,
2154 xbitpos % BITS_PER_WORD, 1,
2155 NULL_RTX, word_mode,
2156 word_mode,
2157 bitsize / BITS_PER_UNIT,
2158 BITS_PER_WORD),
2159 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2160 }
2161 return tgtblk;
2162 }
2163
2164
2165 /* Add a USE expression for REG to the (possibly empty) list pointed
2166 to by CALL_FUSAGE. REG must denote a hard register. */
2167
2168 void
2169 use_reg (call_fusage, reg)
2170 rtx *call_fusage, reg;
2171 {
2172 if (GET_CODE (reg) != REG
2173 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2174 abort();
2175
2176 *call_fusage
2177 = gen_rtx_EXPR_LIST (VOIDmode,
2178 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2179 }
2180
2181 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2182 starting at REGNO. All of these registers must be hard registers. */
2183
2184 void
2185 use_regs (call_fusage, regno, nregs)
2186 rtx *call_fusage;
2187 int regno;
2188 int nregs;
2189 {
2190 int i;
2191
2192 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2193 abort ();
2194
2195 for (i = 0; i < nregs; i++)
2196 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2197 }
2198
2199 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2200 PARALLEL REGS. This is for calls that pass values in multiple
2201 non-contiguous locations. The Irix 6 ABI has examples of this. */
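/* Illustrative sketch, not part of the original sources: when expanding
   a call, the argument-passing code records each register used, e.g.

       rtx call_fusage = NULL_RTX;
       use_reg (&call_fusage, gen_rtx_REG (SImode, 3));
       use_group_regs (&call_fusage, parallel_arg);

   and CALL_FUSAGE is later attached to the CALL_INSN so that the hard
   registers are known to be live across the call.  Register 3 and
   PARALLEL_ARG are hypothetical.  */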
2202
2203 void
2204 use_group_regs (call_fusage, regs)
2205 rtx *call_fusage;
2206 rtx regs;
2207 {
2208 int i;
2209
2210 for (i = 0; i < XVECLEN (regs, 0); i++)
2211 {
2212 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2213
2214 /* A NULL entry means the parameter goes both on the stack and in
2215 registers. This can also be a MEM for targets that pass values
2216 partially on the stack and partially in registers. */
2217 if (reg != 0 && GET_CODE (reg) == REG)
2218 use_reg (call_fusage, reg);
2219 }
2220 }
2221 \f
2222 /* Generate several move instructions to clear LEN bytes of block TO.
2223 (A MEM rtx with BLKmode). The caller must pass TO through
2224 protect_from_queue before calling. ALIGN (in bytes) is the maximum
2225 alignment we can assume. */
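/* Illustrative sketch, not part of the original sources: clearing an
   aligned 16-byte block with individual word stores rather than a
   library call, roughly

       clear_by_pieces (blk_mem, 16, UNITS_PER_WORD);

   where BLK_MEM is a hypothetical BLKmode MEM that has already been
   passed through protect_from_queue.  */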
2226
2227 static void
2228 clear_by_pieces (to, len, align)
2229 rtx to;
2230 int len, align;
2231 {
2232 struct clear_by_pieces data;
2233 rtx to_addr = XEXP (to, 0);
2234 int max_size = MOVE_MAX_PIECES + 1;
2235 enum machine_mode mode = VOIDmode, tmode;
2236 enum insn_code icode;
2237
2238 data.offset = 0;
2239 data.to_addr = to_addr;
2240 data.to = to;
2241 data.autinc_to
2242 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2243 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2244
2245 data.explicit_inc_to = 0;
2246 data.reverse
2247 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2248 if (data.reverse) data.offset = len;
2249 data.len = len;
2250
2251 data.to_struct = MEM_IN_STRUCT_P (to);
2252
2253 /* If copying requires more than two move insns,
2254 copy addresses to registers (to make displacements shorter)
2255 and use post-increment if available. */
2256 if (!data.autinc_to
2257 && move_by_pieces_ninsns (len, align) > 2)
2258 {
2259 /* Determine the main mode we'll be using */
2260 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2261 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2262 if (GET_MODE_SIZE (tmode) < max_size)
2263 mode = tmode;
2264
2265 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2266 {
2267 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2268 data.autinc_to = 1;
2269 data.explicit_inc_to = -1;
2270 }
2271 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2272 {
2273 data.to_addr = copy_addr_to_reg (to_addr);
2274 data.autinc_to = 1;
2275 data.explicit_inc_to = 1;
2276 }
2277 if (!data.autinc_to && CONSTANT_P (to_addr))
2278 data.to_addr = copy_addr_to_reg (to_addr);
2279 }
2280
2281 if (! SLOW_UNALIGNED_ACCESS
2282 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2283 align = MOVE_MAX;
2284
2285 /* First move what we can in the largest integer mode, then go to
2286 successively smaller modes. */
2287
2288 while (max_size > 1)
2289 {
2290 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2291 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2292 if (GET_MODE_SIZE (tmode) < max_size)
2293 mode = tmode;
2294
2295 if (mode == VOIDmode)
2296 break;
2297
2298 icode = mov_optab->handlers[(int) mode].insn_code;
2299 if (icode != CODE_FOR_nothing
2300 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2301 GET_MODE_SIZE (mode)))
2302 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2303
2304 max_size = GET_MODE_SIZE (mode);
2305 }
2306
2307 /* The code above should have handled everything. */
2308 if (data.len != 0)
2309 abort ();
2310 }
2311
2312 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2313 with move instructions for mode MODE. GENFUN is the gen_... function
2314 to make a move insn for that mode. DATA has all the other info. */
2315
2316 static void
2317 clear_by_pieces_1 (genfun, mode, data)
2318 rtx (*genfun) PROTO ((rtx, ...));
2319 enum machine_mode mode;
2320 struct clear_by_pieces *data;
2321 {
2322 register int size = GET_MODE_SIZE (mode);
2323 register rtx to1;
2324
2325 while (data->len >= size)
2326 {
2327 if (data->reverse) data->offset -= size;
2328
2329 to1 = (data->autinc_to
2330 ? gen_rtx_MEM (mode, data->to_addr)
2331 : copy_rtx (change_address (data->to, mode,
2332 plus_constant (data->to_addr,
2333 data->offset))));
2334 MEM_IN_STRUCT_P (to1) = data->to_struct;
2335
2336 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2337 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2338
2339 emit_insn ((*genfun) (to1, const0_rtx));
2340 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2341 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2342
2343 if (! data->reverse) data->offset += size;
2344
2345 data->len -= size;
2346 }
2347 }
2348 \f
2349 /* Write zeros through the storage of OBJECT.
2350 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2351 the maximum alignment we can assume, measured in bytes.
2352
2353 If we call a function that returns the length of the block, return it. */
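/* Illustrative sketch, not part of the original sources: zeroing a
   32-byte BLKmode object known to be word aligned might be expanded as

       clear_storage (decl_mem, GEN_INT (32), UNITS_PER_WORD);

   which tries clear_by_pieces first, then a clrstr pattern, and
   finally falls back to memset/bzero.  DECL_MEM is hypothetical.  */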
2354
2355 rtx
2356 clear_storage (object, size, align)
2357 rtx object;
2358 rtx size;
2359 int align;
2360 {
2361 #ifdef TARGET_MEM_FUNCTIONS
2362 static tree fn;
2363 tree call_expr, arg_list;
2364 #endif
2365 rtx retval = 0;
2366
2367 if (GET_MODE (object) == BLKmode)
2368 {
2369 object = protect_from_queue (object, 1);
2370 size = protect_from_queue (size, 0);
2371
2372 if (GET_CODE (size) == CONST_INT
2373 && MOVE_BY_PIECES_P (INTVAL (size), align))
2374 clear_by_pieces (object, INTVAL (size), align);
2375
2376 else
2377 {
2378 /* Try the most limited insn first, because there's no point
2379 including more than one in the machine description unless
2380 the more limited one has some advantage. */
2381
2382 rtx opalign = GEN_INT (align);
2383 enum machine_mode mode;
2384
2385 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2386 mode = GET_MODE_WIDER_MODE (mode))
2387 {
2388 enum insn_code code = clrstr_optab[(int) mode];
2389 insn_operand_predicate_fn pred;
2390
2391 if (code != CODE_FOR_nothing
2392 /* We don't need MODE to be narrower than
2393 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2394 the mode mask, as it is returned by the macro, it will
2395 definitely be less than the actual mode mask. */
2396 && ((GET_CODE (size) == CONST_INT
2397 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2398 <= (GET_MODE_MASK (mode) >> 1)))
2399 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2400 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2401 || (*pred) (object, BLKmode))
2402 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2403 || (*pred) (opalign, VOIDmode)))
2404 {
2405 rtx op1;
2406 rtx last = get_last_insn ();
2407 rtx pat;
2408
2409 op1 = convert_to_mode (mode, size, 1);
2410 pred = insn_data[(int) code].operand[1].predicate;
2411 if (pred != 0 && ! (*pred) (op1, mode))
2412 op1 = copy_to_mode_reg (mode, op1);
2413
2414 pat = GEN_FCN ((int) code) (object, op1, opalign);
2415 if (pat)
2416 {
2417 emit_insn (pat);
2418 return 0;
2419 }
2420 else
2421 delete_insns_since (last);
2422 }
2423 }
2424
2425 /* OBJECT or SIZE may have been passed through protect_from_queue.
2426
2427 It is unsafe to save the value generated by protect_from_queue
2428 and reuse it later. Consider what happens if emit_queue is
2429 called before the return value from protect_from_queue is used.
2430
2431 Expansion of the CALL_EXPR below will call emit_queue before
2432 we are finished emitting RTL for argument setup. So if we are
2433 not careful we could get the wrong value for an argument.
2434
2435 To avoid this problem we go ahead and emit code to copy OBJECT
2436 and SIZE into new pseudos. We can then place those new pseudos
2437 into an RTL_EXPR and use them later, even after a call to
2438 emit_queue.
2439
2440 Note this is not strictly needed for library calls since they
2441 do not call emit_queue before loading their arguments. However,
2442 we may need to have library calls call emit_queue in the future
2443 since failing to do so could cause problems for targets which
2444 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2445 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2446
2447 #ifdef TARGET_MEM_FUNCTIONS
2448 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2449 #else
2450 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2451 TREE_UNSIGNED (integer_type_node));
2452 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2453 #endif
2454
2455
2456 #ifdef TARGET_MEM_FUNCTIONS
2457 /* It is incorrect to use the libcall calling conventions to call
2458 memset in this context.
2459
2460 This could be a user call to memset and the user may wish to
2461 examine the return value from memset.
2462
2463 For targets where libcalls and normal calls have different
2464 conventions for returning pointers, we could end up generating
2465 incorrect code.
2466
2467 So instead of using a libcall sequence we build up a suitable
2468 CALL_EXPR and expand the call in the normal fashion. */
2469 if (fn == NULL_TREE)
2470 {
2471 tree fntype;
2472
2473 /* This was copied from except.c, I don't know if all this is
2474 necessary in this context or not. */
2475 fn = get_identifier ("memset");
2476 push_obstacks_nochange ();
2477 end_temporary_allocation ();
2478 fntype = build_pointer_type (void_type_node);
2479 fntype = build_function_type (fntype, NULL_TREE);
2480 fn = build_decl (FUNCTION_DECL, fn, fntype);
2481 ggc_add_tree_root (&fn, 1);
2482 DECL_EXTERNAL (fn) = 1;
2483 TREE_PUBLIC (fn) = 1;
2484 DECL_ARTIFICIAL (fn) = 1;
2485 make_decl_rtl (fn, NULL_PTR, 1);
2486 assemble_external (fn);
2487 pop_obstacks ();
2488 }
2489
2490 /* We need to make an argument list for the function call.
2491
2492 memset has three arguments: the first is a void * address, the
2493 second an integer with the initialization value, and the last a
2494 size_t byte count for the copy. */
2495 arg_list
2496 = build_tree_list (NULL_TREE,
2497 make_tree (build_pointer_type (void_type_node),
2498 object));
2499 TREE_CHAIN (arg_list)
2500 = build_tree_list (NULL_TREE,
2501 make_tree (integer_type_node, const0_rtx));
2502 TREE_CHAIN (TREE_CHAIN (arg_list))
2503 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2504 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2505
2506 /* Now we have to build up the CALL_EXPR itself. */
2507 call_expr = build1 (ADDR_EXPR,
2508 build_pointer_type (TREE_TYPE (fn)), fn);
2509 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2510 call_expr, arg_list, NULL_TREE);
2511 TREE_SIDE_EFFECTS (call_expr) = 1;
2512
2513 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2514 #else
2515 emit_library_call (bzero_libfunc, 0,
2516 VOIDmode, 2, object, Pmode, size,
2517 TYPE_MODE (integer_type_node));
2518 #endif
2519 }
2520 }
2521 else
2522 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2523
2524 return retval;
2525 }
2526
2527 /* Generate code to copy Y into X.
2528 Both Y and X must have the same mode, except that
2529 Y can be a constant with VOIDmode.
2530 This mode cannot be BLKmode; use emit_block_move for that.
2531
2532 Return the last instruction emitted. */
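/* Illustrative sketch, not part of the original sources: the usual way
   to copy one rtx into another of the same mode, e.g.

       emit_move_insn (gen_rtx_REG (SImode, 0), GEN_INT (42));

   constants that fail LEGITIMATE_CONSTANT_P are forced into memory
   first.  Register 0 is hypothetical.  */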
2533
2534 rtx
2535 emit_move_insn (x, y)
2536 rtx x, y;
2537 {
2538 enum machine_mode mode = GET_MODE (x);
2539
2540 x = protect_from_queue (x, 1);
2541 y = protect_from_queue (y, 0);
2542
2543 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2544 abort ();
2545
2546 /* Never force constant_p_rtx to memory. */
2547 if (GET_CODE (y) == CONSTANT_P_RTX)
2548 ;
2549 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2550 y = force_const_mem (mode, y);
2551
2552 /* If X or Y are memory references, verify that their addresses are valid
2553 for the machine. */
2554 if (GET_CODE (x) == MEM
2555 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2556 && ! push_operand (x, GET_MODE (x)))
2557 || (flag_force_addr
2558 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2559 x = change_address (x, VOIDmode, XEXP (x, 0));
2560
2561 if (GET_CODE (y) == MEM
2562 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2563 || (flag_force_addr
2564 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2565 y = change_address (y, VOIDmode, XEXP (y, 0));
2566
2567 if (mode == BLKmode)
2568 abort ();
2569
2570 return emit_move_insn_1 (x, y);
2571 }
2572
2573 /* Low level part of emit_move_insn.
2574 Called just like emit_move_insn, but assumes X and Y
2575 are basically valid. */
2576
2577 rtx
2578 emit_move_insn_1 (x, y)
2579 rtx x, y;
2580 {
2581 enum machine_mode mode = GET_MODE (x);
2582 enum machine_mode submode;
2583 enum mode_class class = GET_MODE_CLASS (mode);
2584 int i;
2585
2586 if (mode >= MAX_MACHINE_MODE)
2587 abort ();
2588
2589 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2590 return
2591 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2592
2593 /* Expand complex moves by moving real part and imag part, if possible. */
2594 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2595 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2596 * BITS_PER_UNIT),
2597 (class == MODE_COMPLEX_INT
2598 ? MODE_INT : MODE_FLOAT),
2599 0))
2600 && (mov_optab->handlers[(int) submode].insn_code
2601 != CODE_FOR_nothing))
2602 {
2603 /* Don't split destination if it is a stack push. */
2604 int stack = push_operand (x, GET_MODE (x));
2605
2606 /* If this is a stack, push the highpart first, so it
2607 will be in the argument order.
2608
2609 In that case, change_address is used only to convert
2610 the mode, not to change the address. */
2611 if (stack)
2612 {
2613 /* Note that the real part always precedes the imag part in memory
2614 regardless of machine's endianness. */
2615 #ifdef STACK_GROWS_DOWNWARD
2616 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2617 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2618 gen_imagpart (submode, y)));
2619 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2620 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2621 gen_realpart (submode, y)));
2622 #else
2623 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2624 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2625 gen_realpart (submode, y)));
2626 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2627 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2628 gen_imagpart (submode, y)));
2629 #endif
2630 }
2631 else
2632 {
2633 /* Show the output dies here. This is necessary for pseudos;
2634 hard regs shouldn't appear here except as return values.
2635 We never want to emit such a clobber after reload. */
2636 if (x != y
2637 && ! (reload_in_progress || reload_completed))
2638 {
2639 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2640 }
2641
2642 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2643 (gen_realpart (submode, x), gen_realpart (submode, y)));
2644 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2645 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2646 }
2647
2648 return get_last_insn ();
2649 }
2650
2651 /* This will handle any multi-word mode that lacks a move_insn pattern.
2652 However, you will get better code if you define such patterns,
2653 even if they must turn into multiple assembler instructions. */
2654 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2655 {
2656 rtx last_insn = 0;
2657
2658 #ifdef PUSH_ROUNDING
2659
2660 /* If X is a push on the stack, do the push now and replace
2661 X with a reference to the stack pointer. */
2662 if (push_operand (x, GET_MODE (x)))
2663 {
2664 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2665 x = change_address (x, VOIDmode, stack_pointer_rtx);
2666 }
2667 #endif
2668
2669 /* Show the output dies here. This is necessary for pseudos;
2670 hard regs shouldn't appear here except as return values.
2671 We never want to emit such a clobber after reload. */
2672 if (x != y
2673 && ! (reload_in_progress || reload_completed))
2674 {
2675 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2676 }
2677
2678 for (i = 0;
2679 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2680 i++)
2681 {
2682 rtx xpart = operand_subword (x, i, 1, mode);
2683 rtx ypart = operand_subword (y, i, 1, mode);
2684
2685 /* If we can't get a part of Y, put Y into memory if it is a
2686 constant. Otherwise, force it into a register. If we still
2687 can't get a part of Y, abort. */
2688 if (ypart == 0 && CONSTANT_P (y))
2689 {
2690 y = force_const_mem (mode, y);
2691 ypart = operand_subword (y, i, 1, mode);
2692 }
2693 else if (ypart == 0)
2694 ypart = operand_subword_force (y, i, mode);
2695
2696 if (xpart == 0 || ypart == 0)
2697 abort ();
2698
2699 last_insn = emit_move_insn (xpart, ypart);
2700 }
2701
2702 return last_insn;
2703 }
2704 else
2705 abort ();
2706 }
2707 \f
2708 /* Pushing data onto the stack. */
2709
2710 /* Push a block of length SIZE (perhaps variable)
2711 and return an rtx to address the beginning of the block.
2712 Note that it is not possible for the value returned to be a QUEUED.
2713 The value may be virtual_outgoing_args_rtx.
2714
2715 EXTRA is the number of bytes of padding to push in addition to SIZE.
2716 BELOW nonzero means this padding comes at low addresses;
2717 otherwise, the padding comes at high addresses. */
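/* Illustrative sketch, not part of the original sources: to reserve 64
   bytes of stack for a BLKmode argument plus 4 bytes of padding below
   it, a caller might do

       rtx addr = push_block (GEN_INT (64), 4, 1);

   and then copy the argument into the block addressed by ADDR.  The
   sizes are hypothetical.  */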
2718
2719 rtx
2720 push_block (size, extra, below)
2721 rtx size;
2722 int extra, below;
2723 {
2724 register rtx temp;
2725
2726 size = convert_modes (Pmode, ptr_mode, size, 1);
2727 if (CONSTANT_P (size))
2728 anti_adjust_stack (plus_constant (size, extra));
2729 else if (GET_CODE (size) == REG && extra == 0)
2730 anti_adjust_stack (size);
2731 else
2732 {
2733 rtx temp = copy_to_mode_reg (Pmode, size);
2734 if (extra != 0)
2735 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2736 temp, 0, OPTAB_LIB_WIDEN);
2737 anti_adjust_stack (temp);
2738 }
2739
2740 #if defined (STACK_GROWS_DOWNWARD) \
2741 || (defined (ARGS_GROW_DOWNWARD) \
2742 && !defined (ACCUMULATE_OUTGOING_ARGS))
2743
2744 /* Return the lowest stack address when STACK or ARGS grow downward and
2745 we are not accumulating outgoing arguments (the c4x port uses such
2746 conventions). */
2747 temp = virtual_outgoing_args_rtx;
2748 if (extra != 0 && below)
2749 temp = plus_constant (temp, extra);
2750 #else
2751 if (GET_CODE (size) == CONST_INT)
2752 temp = plus_constant (virtual_outgoing_args_rtx,
2753 - INTVAL (size) - (below ? 0 : extra));
2754 else if (extra != 0 && !below)
2755 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2756 negate_rtx (Pmode, plus_constant (size, extra)));
2757 else
2758 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2759 negate_rtx (Pmode, size));
2760 #endif
2761
2762 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2763 }
2764
2765 rtx
2766 gen_push_operand ()
2767 {
2768 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2769 }
2770
2771 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2772 block of SIZE bytes. */
2773
2774 static rtx
2775 get_push_address (size)
2776 int size;
2777 {
2778 register rtx temp;
2779
2780 if (STACK_PUSH_CODE == POST_DEC)
2781 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2782 else if (STACK_PUSH_CODE == POST_INC)
2783 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2784 else
2785 temp = stack_pointer_rtx;
2786
2787 return copy_to_reg (temp);
2788 }
2789
2790 /* Generate code to push X onto the stack, assuming it has mode MODE and
2791 type TYPE.
2792 MODE is redundant except when X is a CONST_INT (since they don't
2793 carry mode info).
2794 SIZE is an rtx for the size of data to be copied (in bytes),
2795 needed only if X is BLKmode.
2796
2797 ALIGN (in bytes) is the maximum alignment we can assume.
2798
2799 If PARTIAL and REG are both nonzero, then copy that many of the first
2800 words of X into registers starting with REG, and push the rest of X.
2801 The amount of space pushed is decreased by PARTIAL words,
2802 rounded *down* to a multiple of PARM_BOUNDARY.
2803 REG must be a hard register in this case.
2804 If REG is zero but PARTIAL is not, take all other actions for an
2805 argument partially in registers, but do not actually load any
2806 registers.
2807
2808 EXTRA is the amount in bytes of extra space to leave next to this arg.
2809 This is ignored if an argument block has already been allocated.
2810
2811 On a machine that lacks real push insns, ARGS_ADDR is the address of
2812 the bottom of the argument block for this call. We use indexing off there
2813 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2814 argument block has not been preallocated.
2815
2816 ARGS_SO_FAR is the size of args previously pushed for this call.
2817
2818 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2819 for arguments passed in registers. If nonzero, it will be the number
2820 of bytes required. */
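/* Illustrative sketch, not part of the original sources: pushing a
   SImode scalar X with no partial-register part and no preallocated
   argument block could look roughly like

       emit_push_insn (x, SImode, integer_type_node, NULL_RTX,
                       GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT,
                       0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0);

   the exact arguments depend on the target's calling conventions and
   are hypothetical here.  */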
2821
2822 void
2823 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2824 args_addr, args_so_far, reg_parm_stack_space)
2825 register rtx x;
2826 enum machine_mode mode;
2827 tree type;
2828 rtx size;
2829 int align;
2830 int partial;
2831 rtx reg;
2832 int extra;
2833 rtx args_addr;
2834 rtx args_so_far;
2835 int reg_parm_stack_space;
2836 {
2837 rtx xinner;
2838 enum direction stack_direction
2839 #ifdef STACK_GROWS_DOWNWARD
2840 = downward;
2841 #else
2842 = upward;
2843 #endif
2844
2845 /* Decide where to pad the argument: `downward' for below,
2846 `upward' for above, or `none' for don't pad it.
2847 Default is below for small data on big-endian machines; else above. */
2848 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2849
2850 /* Invert direction if stack is post-update. */
2851 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2852 if (where_pad != none)
2853 where_pad = (where_pad == downward ? upward : downward);
2854
2855 xinner = x = protect_from_queue (x, 0);
2856
2857 if (mode == BLKmode)
2858 {
2859 /* Copy a block into the stack, entirely or partially. */
2860
2861 register rtx temp;
2862 int used = partial * UNITS_PER_WORD;
2863 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2864 int skip;
2865
2866 if (size == 0)
2867 abort ();
2868
2869 used -= offset;
2870
2871 /* USED is now the # of bytes we need not copy to the stack
2872 because registers will take care of them. */
2873
2874 if (partial != 0)
2875 xinner = change_address (xinner, BLKmode,
2876 plus_constant (XEXP (xinner, 0), used));
2877
2878 /* If the partial register-part of the arg counts in its stack size,
2879 skip the part of stack space corresponding to the registers.
2880 Otherwise, start copying to the beginning of the stack space,
2881 by setting SKIP to 0. */
2882 skip = (reg_parm_stack_space == 0) ? 0 : used;
2883
2884 #ifdef PUSH_ROUNDING
2885 /* Do it with several push insns if that doesn't take lots of insns
2886 and if there is no difficulty with push insns that skip bytes
2887 on the stack for alignment purposes. */
2888 if (args_addr == 0
2889 && GET_CODE (size) == CONST_INT
2890 && skip == 0
2891 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2892 /* Here we avoid the case of a structure whose weak alignment
2893 forces many pushes of a small amount of data,
2894 and such small pushes do rounding that causes trouble. */
2895 && ((! SLOW_UNALIGNED_ACCESS)
2896 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2897 || PUSH_ROUNDING (align) == align)
2898 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2899 {
2900 /* Push padding now if padding above and stack grows down,
2901 or if padding below and stack grows up.
2902 But if space already allocated, this has already been done. */
2903 if (extra && args_addr == 0
2904 && where_pad != none && where_pad != stack_direction)
2905 anti_adjust_stack (GEN_INT (extra));
2906
2907 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2908 INTVAL (size) - used, align);
2909
2910 if (current_function_check_memory_usage && ! in_check_memory_usage)
2911 {
2912 rtx temp;
2913
2914 in_check_memory_usage = 1;
2915 temp = get_push_address (INTVAL(size) - used);
2916 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2917 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2918 temp, Pmode,
2919 XEXP (xinner, 0), Pmode,
2920 GEN_INT (INTVAL(size) - used),
2921 TYPE_MODE (sizetype));
2922 else
2923 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2924 temp, Pmode,
2925 GEN_INT (INTVAL(size) - used),
2926 TYPE_MODE (sizetype),
2927 GEN_INT (MEMORY_USE_RW),
2928 TYPE_MODE (integer_type_node));
2929 in_check_memory_usage = 0;
2930 }
2931 }
2932 else
2933 #endif /* PUSH_ROUNDING */
2934 {
2935 /* Otherwise make space on the stack and copy the data
2936 to the address of that space. */
2937
2938 /* Deduct words put into registers from the size we must copy. */
2939 if (partial != 0)
2940 {
2941 if (GET_CODE (size) == CONST_INT)
2942 size = GEN_INT (INTVAL (size) - used);
2943 else
2944 size = expand_binop (GET_MODE (size), sub_optab, size,
2945 GEN_INT (used), NULL_RTX, 0,
2946 OPTAB_LIB_WIDEN);
2947 }
2948
2949 /* Get the address of the stack space.
2950 In this case, we do not deal with EXTRA separately.
2951 A single stack adjust will do. */
2952 if (! args_addr)
2953 {
2954 temp = push_block (size, extra, where_pad == downward);
2955 extra = 0;
2956 }
2957 else if (GET_CODE (args_so_far) == CONST_INT)
2958 temp = memory_address (BLKmode,
2959 plus_constant (args_addr,
2960 skip + INTVAL (args_so_far)));
2961 else
2962 temp = memory_address (BLKmode,
2963 plus_constant (gen_rtx_PLUS (Pmode,
2964 args_addr,
2965 args_so_far),
2966 skip));
2967 if (current_function_check_memory_usage && ! in_check_memory_usage)
2968 {
2969 rtx target;
2970
2971 in_check_memory_usage = 1;
2972 target = copy_to_reg (temp);
2973 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2974 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2975 target, Pmode,
2976 XEXP (xinner, 0), Pmode,
2977 size, TYPE_MODE (sizetype));
2978 else
2979 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2980 target, Pmode,
2981 size, TYPE_MODE (sizetype),
2982 GEN_INT (MEMORY_USE_RW),
2983 TYPE_MODE (integer_type_node));
2984 in_check_memory_usage = 0;
2985 }
2986
2987 /* TEMP is the address of the block. Copy the data there. */
2988 if (GET_CODE (size) == CONST_INT
2989 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
2990 {
2991 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2992 INTVAL (size), align);
2993 goto ret;
2994 }
2995 else
2996 {
2997 rtx opalign = GEN_INT (align);
2998 enum machine_mode mode;
2999 rtx target = gen_rtx_MEM (BLKmode, temp);
3000
3001 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3002 mode != VOIDmode;
3003 mode = GET_MODE_WIDER_MODE (mode))
3004 {
3005 enum insn_code code = movstr_optab[(int) mode];
3006 insn_operand_predicate_fn pred;
3007
3008 if (code != CODE_FOR_nothing
3009 && ((GET_CODE (size) == CONST_INT
3010 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3011 <= (GET_MODE_MASK (mode) >> 1)))
3012 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3013 && (!(pred = insn_data[(int) code].operand[0].predicate)
3014 || ((*pred) (target, BLKmode)))
3015 && (!(pred = insn_data[(int) code].operand[1].predicate)
3016 || ((*pred) (xinner, BLKmode)))
3017 && (!(pred = insn_data[(int) code].operand[3].predicate)
3018 || ((*pred) (opalign, VOIDmode))))
3019 {
3020 rtx op2 = convert_to_mode (mode, size, 1);
3021 rtx last = get_last_insn ();
3022 rtx pat;
3023
3024 pred = insn_data[(int) code].operand[2].predicate;
3025 if (pred != 0 && ! (*pred) (op2, mode))
3026 op2 = copy_to_mode_reg (mode, op2);
3027
3028 pat = GEN_FCN ((int) code) (target, xinner,
3029 op2, opalign);
3030 if (pat)
3031 {
3032 emit_insn (pat);
3033 goto ret;
3034 }
3035 else
3036 delete_insns_since (last);
3037 }
3038 }
3039 }
3040
3041 #ifndef ACCUMULATE_OUTGOING_ARGS
3042 /* If the source is referenced relative to the stack pointer,
3043 copy it to another register to stabilize it. We do not need
3044 to do this if we know that we won't be changing sp. */
3045
3046 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3047 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3048 temp = copy_to_reg (temp);
3049 #endif
3050
3051 /* Make inhibit_defer_pop nonzero around the library call
3052 to force it to pop the bcopy-arguments right away. */
3053 NO_DEFER_POP;
3054 #ifdef TARGET_MEM_FUNCTIONS
3055 emit_library_call (memcpy_libfunc, 0,
3056 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3057 convert_to_mode (TYPE_MODE (sizetype),
3058 size, TREE_UNSIGNED (sizetype)),
3059 TYPE_MODE (sizetype));
3060 #else
3061 emit_library_call (bcopy_libfunc, 0,
3062 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3063 convert_to_mode (TYPE_MODE (integer_type_node),
3064 size,
3065 TREE_UNSIGNED (integer_type_node)),
3066 TYPE_MODE (integer_type_node));
3067 #endif
3068 OK_DEFER_POP;
3069 }
3070 }
3071 else if (partial > 0)
3072 {
3073 /* Scalar partly in registers. */
3074
3075 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3076 int i;
3077 int not_stack;
3078 /* # words of start of argument
3079 that we must make space for but need not store. */
3080 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3081 int args_offset = INTVAL (args_so_far);
3082 int skip;
3083
3084 /* Push padding now if padding above and stack grows down,
3085 or if padding below and stack grows up.
3086 But if space already allocated, this has already been done. */
3087 if (extra && args_addr == 0
3088 && where_pad != none && where_pad != stack_direction)
3089 anti_adjust_stack (GEN_INT (extra));
3090
3091 /* If we make space by pushing it, we might as well push
3092 the real data. Otherwise, we can leave OFFSET nonzero
3093 and leave the space uninitialized. */
3094 if (args_addr == 0)
3095 offset = 0;
3096
3097 /* Now NOT_STACK gets the number of words that we don't need to
3098 allocate on the stack. */
3099 not_stack = partial - offset;
3100
3101 /* If the partial register-part of the arg counts in its stack size,
3102 skip the part of stack space corresponding to the registers.
3103 Otherwise, start copying to the beginning of the stack space,
3104 by setting SKIP to 0. */
3105 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3106
3107 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3108 x = validize_mem (force_const_mem (mode, x));
3109
3110 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3111 SUBREGs of such registers are not allowed. */
3112 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3113 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3114 x = copy_to_reg (x);
3115
3116 /* Loop over all the words allocated on the stack for this arg. */
3117 /* We can do it by words, because any scalar bigger than a word
3118 has a size that is a multiple of a word. */
3119 #ifndef PUSH_ARGS_REVERSED
3120 for (i = not_stack; i < size; i++)
3121 #else
3122 for (i = size - 1; i >= not_stack; i--)
3123 #endif
3124 if (i >= not_stack + offset)
3125 emit_push_insn (operand_subword_force (x, i, mode),
3126 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3127 0, args_addr,
3128 GEN_INT (args_offset + ((i - not_stack + skip)
3129 * UNITS_PER_WORD)),
3130 reg_parm_stack_space);
3131 }
3132 else
3133 {
3134 rtx addr;
3135 rtx target = NULL_RTX;
3136
3137 /* Push padding now if padding above and stack grows down,
3138 or if padding below and stack grows up.
3139 But if space already allocated, this has already been done. */
3140 if (extra && args_addr == 0
3141 && where_pad != none && where_pad != stack_direction)
3142 anti_adjust_stack (GEN_INT (extra));
3143
3144 #ifdef PUSH_ROUNDING
3145 if (args_addr == 0)
3146 addr = gen_push_operand ();
3147 else
3148 #endif
3149 {
3150 if (GET_CODE (args_so_far) == CONST_INT)
3151 addr
3152 = memory_address (mode,
3153 plus_constant (args_addr,
3154 INTVAL (args_so_far)));
3155 else
3156 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3157 args_so_far));
3158 target = addr;
3159 }
3160
3161 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3162
3163 if (current_function_check_memory_usage && ! in_check_memory_usage)
3164 {
3165 in_check_memory_usage = 1;
3166 if (target == 0)
3167 target = get_push_address (GET_MODE_SIZE (mode));
3168
3169 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3170 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3171 target, Pmode,
3172 XEXP (x, 0), Pmode,
3173 GEN_INT (GET_MODE_SIZE (mode)),
3174 TYPE_MODE (sizetype));
3175 else
3176 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3177 target, Pmode,
3178 GEN_INT (GET_MODE_SIZE (mode)),
3179 TYPE_MODE (sizetype),
3180 GEN_INT (MEMORY_USE_RW),
3181 TYPE_MODE (integer_type_node));
3182 in_check_memory_usage = 0;
3183 }
3184 }
3185
3186 ret:
3187 /* If part should go in registers, copy that part
3188 into the appropriate registers. Do this now, at the end,
3189 since mem-to-mem copies above may do function calls. */
3190 if (partial > 0 && reg != 0)
3191 {
3192 /* Handle calls that pass values in multiple non-contiguous locations.
3193 The Irix 6 ABI has examples of this. */
3194 if (GET_CODE (reg) == PARALLEL)
3195 emit_group_load (reg, x, -1, align); /* ??? size? */
3196 else
3197 move_block_to_reg (REGNO (reg), x, partial, mode);
3198 }
3199
3200 if (extra && args_addr == 0 && where_pad == stack_direction)
3201 anti_adjust_stack (GEN_INT (extra));
3202 }
3203 \f
3204 /* Expand an assignment that stores the value of FROM into TO.
3205 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3206 (This may contain a QUEUED rtx;
3207 if the value is constant, this rtx is a constant.)
3208 Otherwise, the returned value is NULL_RTX.
3209
3210 SUGGEST_REG is no longer actually used.
3211 It used to mean, copy the value through a register
3212 and return that register, if that is possible.
3213 We now use WANT_VALUE to decide whether to do this. */
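/* Illustrative sketch, not part of the original sources: a front end
   expands the statement `a = b;' roughly as

       expand_assignment (lhs_tree, rhs_tree, 0, 0);

   asking for no value; an assignment used as an expression would pass
   WANT_VALUE == 1 instead.  LHS_TREE and RHS_TREE are hypothetical.  */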
3214
3215 rtx
3216 expand_assignment (to, from, want_value, suggest_reg)
3217 tree to, from;
3218 int want_value;
3219 int suggest_reg ATTRIBUTE_UNUSED;
3220 {
3221 register rtx to_rtx = 0;
3222 rtx result;
3223
3224 /* Don't crash if the lhs of the assignment was erroneous. */
3225
3226 if (TREE_CODE (to) == ERROR_MARK)
3227 {
3228 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3229 return want_value ? result : NULL_RTX;
3230 }
3231
3232 /* Assignment of a structure component needs special treatment
3233 if the structure component's rtx is not simply a MEM.
3234 Assignment of an array element at a constant index, and assignment of
3235 an array element in an unaligned packed structure field, has the same
3236 problem. */
3237
3238 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3239 || TREE_CODE (to) == ARRAY_REF)
3240 {
3241 enum machine_mode mode1;
3242 int bitsize;
3243 int bitpos;
3244 tree offset;
3245 int unsignedp;
3246 int volatilep = 0;
3247 tree tem;
3248 int alignment;
3249
3250 push_temp_slots ();
3251 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3252 &unsignedp, &volatilep, &alignment);
3253
3254 /* If we are going to use store_bit_field and extract_bit_field,
3255 make sure to_rtx will be safe for multiple use. */
3256
3257 if (mode1 == VOIDmode && want_value)
3258 tem = stabilize_reference (tem);
3259
3260 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3261 if (offset != 0)
3262 {
3263 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3264
3265 if (GET_CODE (to_rtx) != MEM)
3266 abort ();
3267
3268 if (GET_MODE (offset_rtx) != ptr_mode)
3269 {
3270 #ifdef POINTERS_EXTEND_UNSIGNED
3271 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3272 #else
3273 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3274 #endif
3275 }
3276
3277 /* A constant address in TO_RTX can have VOIDmode; we must not try
3278 to call force_reg for that case. Avoid that case. */
3279 if (GET_CODE (to_rtx) == MEM
3280 && GET_MODE (to_rtx) == BLKmode
3281 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3282 && bitsize
3283 && (bitpos % bitsize) == 0
3284 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3285 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3286 {
3287 rtx temp = change_address (to_rtx, mode1,
3288 plus_constant (XEXP (to_rtx, 0),
3289 (bitpos /
3290 BITS_PER_UNIT)));
3291 if (GET_CODE (XEXP (temp, 0)) == REG)
3292 to_rtx = temp;
3293 else
3294 to_rtx = change_address (to_rtx, mode1,
3295 force_reg (GET_MODE (XEXP (temp, 0)),
3296 XEXP (temp, 0)));
3297 bitpos = 0;
3298 }
3299
3300 to_rtx = change_address (to_rtx, VOIDmode,
3301 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3302 force_reg (ptr_mode,
3303 offset_rtx)));
3304 }
3305
3306 if (volatilep)
3307 {
3308 if (GET_CODE (to_rtx) == MEM)
3309 {
3310 /* When the offset is zero, to_rtx is the address of the
3311 structure we are storing into, and hence may be shared.
3312 We must make a new MEM before setting the volatile bit. */
3313 if (offset == 0)
3314 to_rtx = copy_rtx (to_rtx);
3315
3316 MEM_VOLATILE_P (to_rtx) = 1;
3317 }
3318 #if 0 /* This was turned off because, when a field is volatile
3319 in an object which is not volatile, the object may be in a register,
3320 and then we would abort over here. */
3321 else
3322 abort ();
3323 #endif
3324 }
3325
3326 if (TREE_CODE (to) == COMPONENT_REF
3327 && TREE_READONLY (TREE_OPERAND (to, 1)))
3328 {
3329 if (offset == 0)
3330 to_rtx = copy_rtx (to_rtx);
3331
3332 RTX_UNCHANGING_P (to_rtx) = 1;
3333 }
3334
3335 /* Check the access. */
3336 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3337 {
3338 rtx to_addr;
3339 int size;
3340 int best_mode_size;
3341 enum machine_mode best_mode;
3342
3343 best_mode = get_best_mode (bitsize, bitpos,
3344 TYPE_ALIGN (TREE_TYPE (tem)),
3345 mode1, volatilep);
3346 if (best_mode == VOIDmode)
3347 best_mode = QImode;
3348
3349 best_mode_size = GET_MODE_BITSIZE (best_mode);
3350 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3351 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3352 size *= GET_MODE_SIZE (best_mode);
3353
3354 /* Check the access right of the pointer. */
3355 if (size)
3356 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3357 to_addr, Pmode,
3358 GEN_INT (size), TYPE_MODE (sizetype),
3359 GEN_INT (MEMORY_USE_WO),
3360 TYPE_MODE (integer_type_node));
3361 }
3362
3363 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3364 (want_value
3365 /* Spurious cast makes HPUX compiler happy. */
3366 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3367 : VOIDmode),
3368 unsignedp,
3369 /* Required alignment of containing datum. */
3370 alignment,
3371 int_size_in_bytes (TREE_TYPE (tem)),
3372 get_alias_set (to));
3373 preserve_temp_slots (result);
3374 free_temp_slots ();
3375 pop_temp_slots ();
3376
3377 /* If the value is meaningful, convert RESULT to the proper mode.
3378 Otherwise, return nothing. */
3379 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3380 TYPE_MODE (TREE_TYPE (from)),
3381 result,
3382 TREE_UNSIGNED (TREE_TYPE (to)))
3383 : NULL_RTX);
3384 }
3385
3386 /* If the rhs is a function call and its value is not an aggregate,
3387 call the function before we start to compute the lhs.
3388 This is needed for correct code for cases such as
3389 val = setjmp (buf) on machines where reference to val
3390 requires loading up part of an address in a separate insn.
3391
3392 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3393 a promoted variable where the zero- or sign- extension needs to be done.
3394 Handling this in the normal way is safe because no computation is done
3395 before the call. */
3396 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3397 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3398 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3399 {
3400 rtx value;
3401
3402 push_temp_slots ();
3403 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3404 if (to_rtx == 0)
3405 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3406
3407 /* Handle calls that return values in multiple non-contiguous locations.
3408 The Irix 6 ABI has examples of this. */
3409 if (GET_CODE (to_rtx) == PARALLEL)
3410 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3411 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3412 else if (GET_MODE (to_rtx) == BLKmode)
3413 emit_block_move (to_rtx, value, expr_size (from),
3414 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3415 else
3416 {
3417 #ifdef POINTERS_EXTEND_UNSIGNED
3418 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3419 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3420 value = convert_memory_address (GET_MODE (to_rtx), value);
3421 #endif
3422 emit_move_insn (to_rtx, value);
3423 }
3424 preserve_temp_slots (to_rtx);
3425 free_temp_slots ();
3426 pop_temp_slots ();
3427 return want_value ? to_rtx : NULL_RTX;
3428 }
3429
3430 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3431 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3432
3433 if (to_rtx == 0)
3434 {
3435 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3436 if (GET_CODE (to_rtx) == MEM)
3437 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3438 }
3439
3440 /* Don't move directly into a return register. */
3441 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3442 {
3443 rtx temp;
3444
3445 push_temp_slots ();
3446 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3447 emit_move_insn (to_rtx, temp);
3448 preserve_temp_slots (to_rtx);
3449 free_temp_slots ();
3450 pop_temp_slots ();
3451 return want_value ? to_rtx : NULL_RTX;
3452 }
3453
3454 /* In case we are returning the contents of an object which overlaps
3455 the place the value is being stored, use a safe function when copying
3456 a value through a pointer into a structure value return block. */
3457 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3458 && current_function_returns_struct
3459 && !current_function_returns_pcc_struct)
3460 {
3461 rtx from_rtx, size;
3462
3463 push_temp_slots ();
3464 size = expr_size (from);
3465 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3466 EXPAND_MEMORY_USE_DONT);
3467
3468 /* Copy the rights of the bitmap. */
3469 if (current_function_check_memory_usage)
3470 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3471 XEXP (to_rtx, 0), Pmode,
3472 XEXP (from_rtx, 0), Pmode,
3473 convert_to_mode (TYPE_MODE (sizetype),
3474 size, TREE_UNSIGNED (sizetype)),
3475 TYPE_MODE (sizetype));
3476
3477 #ifdef TARGET_MEM_FUNCTIONS
3478 emit_library_call (memcpy_libfunc, 0,
3479 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3480 XEXP (from_rtx, 0), Pmode,
3481 convert_to_mode (TYPE_MODE (sizetype),
3482 size, TREE_UNSIGNED (sizetype)),
3483 TYPE_MODE (sizetype));
3484 #else
3485 emit_library_call (bcopy_libfunc, 0,
3486 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3487 XEXP (to_rtx, 0), Pmode,
3488 convert_to_mode (TYPE_MODE (integer_type_node),
3489 size, TREE_UNSIGNED (integer_type_node)),
3490 TYPE_MODE (integer_type_node));
3491 #endif
3492
3493 preserve_temp_slots (to_rtx);
3494 free_temp_slots ();
3495 pop_temp_slots ();
3496 return want_value ? to_rtx : NULL_RTX;
3497 }
3498
3499 /* Compute FROM and store the value in the rtx we got. */
3500
3501 push_temp_slots ();
3502 result = store_expr (from, to_rtx, want_value);
3503 preserve_temp_slots (result);
3504 free_temp_slots ();
3505 pop_temp_slots ();
3506 return want_value ? result : NULL_RTX;
3507 }
3508
3509 /* Generate code for computing expression EXP,
3510 and storing the value into TARGET.
3511 TARGET may contain a QUEUED rtx.
3512
3513 If WANT_VALUE is nonzero, return a copy of the value
3514 not in TARGET, so that we can be sure to use the proper
3515 value in a containing expression even if TARGET has something
3516 else stored in it. If possible, we copy the value through a pseudo
3517 and return that pseudo. Or, if the value is constant, we try to
3518 return the constant. In some cases, we return a pseudo
3519 copied *from* TARGET.
3520
3521 If the mode is BLKmode then we may return TARGET itself.
3522 It turns out that in BLKmode it doesn't cause a problem,
3523 because C has no operators that could combine two different
3524 assignments into the same BLKmode object with different values
3525 with no sequence point. Will other languages need this to
3526 be more thorough?
3527
3528 If WANT_VALUE is 0, we return NULL, to make sure
3529 to catch quickly any cases where the caller uses the value
3530 and fails to set WANT_VALUE. */
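/* Illustrative sketch, not part of the original sources: the common
   case is reached from expand_assignment above, e.g.

       rtx val = store_expr (rhs_tree, to_rtx, 1);

   which computes RHS_TREE into TO_RTX and, because WANT_VALUE is
   nonzero, returns an rtx (often a pseudo) holding the stored value.
   RHS_TREE and TO_RTX are hypothetical.  */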
3531
3532 rtx
3533 store_expr (exp, target, want_value)
3534 register tree exp;
3535 register rtx target;
3536 int want_value;
3537 {
3538 register rtx temp;
3539 int dont_return_target = 0;
3540
3541 if (TREE_CODE (exp) == COMPOUND_EXPR)
3542 {
3543 /* Perform first part of compound expression, then assign from second
3544 part. */
3545 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3546 emit_queue ();
3547 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3548 }
3549 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3550 {
3551 /* For conditional expression, get safe form of the target. Then
3552 test the condition, doing the appropriate assignment on either
3553 side. This avoids the creation of unnecessary temporaries.
3554 For non-BLKmode, it is more efficient not to do this. */
3555
3556 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3557
3558 emit_queue ();
3559 target = protect_from_queue (target, 1);
3560
3561 do_pending_stack_adjust ();
3562 NO_DEFER_POP;
3563 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3564 start_cleanup_deferral ();
3565 store_expr (TREE_OPERAND (exp, 1), target, 0);
3566 end_cleanup_deferral ();
3567 emit_queue ();
3568 emit_jump_insn (gen_jump (lab2));
3569 emit_barrier ();
3570 emit_label (lab1);
3571 start_cleanup_deferral ();
3572 store_expr (TREE_OPERAND (exp, 2), target, 0);
3573 end_cleanup_deferral ();
3574 emit_queue ();
3575 emit_label (lab2);
3576 OK_DEFER_POP;
3577
3578 return want_value ? target : NULL_RTX;
3579 }
3580 else if (queued_subexp_p (target))
3581 /* If target contains a postincrement, let's not risk
3582 using it as the place to generate the rhs. */
3583 {
3584 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3585 {
3586 /* Expand EXP into a new pseudo. */
3587 temp = gen_reg_rtx (GET_MODE (target));
3588 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3589 }
3590 else
3591 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3592
3593 /* If target is volatile, ANSI requires accessing the value
3594 *from* the target, if it is accessed. So make that happen.
3595 In no case return the target itself. */
3596 if (! MEM_VOLATILE_P (target) && want_value)
3597 dont_return_target = 1;
3598 }
3599 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3600 && GET_MODE (target) != BLKmode)
3601 /* If target is in memory and caller wants value in a register instead,
3602 arrange that. Pass TARGET as target for expand_expr so that,
3603 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3604 We know expand_expr will not use the target in that case.
3605 Don't do this if TARGET is volatile because we are supposed
3606 to write it and then read it. */
3607 {
3608 temp = expand_expr (exp, target, GET_MODE (target), 0);
3609 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3610 temp = copy_to_reg (temp);
3611 dont_return_target = 1;
3612 }
3613 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3614 /* If this is a scalar in a register that is stored in a wider mode
3615 than the declared mode, compute the result into its declared mode
3616 and then convert to the wider mode. Our value is the computed
3617 expression. */
3618 {
3619 /* If we don't want a value, we can do the conversion inside EXP,
3620 which will often result in some optimizations. Do the conversion
3621 in two steps: first change the signedness, if needed, then
3622 the extend. But don't do this if the type of EXP is a subtype
3623 of something else since then the conversion might involve
3624 more than just converting modes. */
3625 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3626 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3627 {
3628 if (TREE_UNSIGNED (TREE_TYPE (exp))
3629 != SUBREG_PROMOTED_UNSIGNED_P (target))
3630 exp
3631 = convert
3632 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3633 TREE_TYPE (exp)),
3634 exp);
3635
3636 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3637 SUBREG_PROMOTED_UNSIGNED_P (target)),
3638 exp);
3639 }
3640
3641 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3642
3643 /* If TEMP is a volatile MEM and we want a result value, make
3644 the access now so it gets done only once. Likewise if
3645 it contains TARGET. */
3646 if (GET_CODE (temp) == MEM && want_value
3647 && (MEM_VOLATILE_P (temp)
3648 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3649 temp = copy_to_reg (temp);
3650
3651 /* If TEMP is a VOIDmode constant, use convert_modes to make
3652 sure that we properly convert it. */
3653 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3654 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3655 TYPE_MODE (TREE_TYPE (exp)), temp,
3656 SUBREG_PROMOTED_UNSIGNED_P (target));
3657
3658 convert_move (SUBREG_REG (target), temp,
3659 SUBREG_PROMOTED_UNSIGNED_P (target));
3660
3661 /* If we promoted a constant, change the mode back down to match
3662 target. Otherwise, the caller might get confused by a result whose
3663 mode is larger than expected. */
3664
3665 if (want_value && GET_MODE (temp) != GET_MODE (target)
3666 && GET_MODE (temp) != VOIDmode)
3667 {
3668 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3669 SUBREG_PROMOTED_VAR_P (temp) = 1;
3670 SUBREG_PROMOTED_UNSIGNED_P (temp)
3671 = SUBREG_PROMOTED_UNSIGNED_P (target);
3672 }
3673
3674 return want_value ? temp : NULL_RTX;
3675 }
3676 else
3677 {
3678 temp = expand_expr (exp, target, GET_MODE (target), 0);
3679 /* Return TARGET if it's a specified hardware register.
3680 If TARGET is a volatile mem ref, either return TARGET
3681 or return a reg copied *from* TARGET; ANSI requires this.
3682
3683 Otherwise, if TEMP is not TARGET, return TEMP
3684 if it is constant (for efficiency),
3685 or if we really want the correct value. */
3686 if (!(target && GET_CODE (target) == REG
3687 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3688 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3689 && ! rtx_equal_p (temp, target)
3690 && (CONSTANT_P (temp) || want_value))
3691 dont_return_target = 1;
3692 }
3693
3694 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3695 the same as that of TARGET, adjust the constant. This is needed, for
3696 example, in case it is a CONST_DOUBLE and we want only a word-sized
3697 value. */
3698 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3699 && TREE_CODE (exp) != ERROR_MARK
3700 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3701 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3702 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3703
3704 if (current_function_check_memory_usage
3705 && GET_CODE (target) == MEM
3706 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3707 {
3708 if (GET_CODE (temp) == MEM)
3709 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3710 XEXP (target, 0), Pmode,
3711 XEXP (temp, 0), Pmode,
3712 expr_size (exp), TYPE_MODE (sizetype));
3713 else
3714 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3715 XEXP (target, 0), Pmode,
3716 expr_size (exp), TYPE_MODE (sizetype),
3717 GEN_INT (MEMORY_USE_WO),
3718 TYPE_MODE (integer_type_node));
3719 }
3720
3721 /* If value was not generated in the target, store it there.
3722 Convert the value to TARGET's type first if necessary. */
3723 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3724 one or both of them are volatile memory refs, we have to distinguish
3725 two cases:
3726 - expand_expr has used TARGET. In this case, we must not generate
3727 another copy. This can be detected by TEMP being equal to TARGET
3728 according to ==.
3729 - expand_expr has not used TARGET - that means that the source just
3730 happens to have the same RTX form. Since temp will have been created
3731 by expand_expr, it will compare unequal according to == .
3732 We must generate a copy in this case, to reach the correct number
3733 of volatile memory references. */
3734
3735 if ((! rtx_equal_p (temp, target)
3736 || (temp != target && (side_effects_p (temp)
3737 || side_effects_p (target))))
3738 && TREE_CODE (exp) != ERROR_MARK)
3739 {
3740 target = protect_from_queue (target, 1);
3741 if (GET_MODE (temp) != GET_MODE (target)
3742 && GET_MODE (temp) != VOIDmode)
3743 {
3744 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3745 if (dont_return_target)
3746 {
3747 /* In this case, we will return TEMP,
3748 so make sure it has the proper mode.
3749 But don't forget to store the value into TARGET. */
3750 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3751 emit_move_insn (target, temp);
3752 }
3753 else
3754 convert_move (target, temp, unsignedp);
3755 }
3756
3757 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3758 {
3759 /* Handle copying a string constant into an array.
3760 The string constant may be shorter than the array.
3761 So copy just the string's actual length, and clear the rest. */
3762 rtx size;
3763 rtx addr;
3764
3765 /* Get the size of the data type of the string,
3766 which is actually the size of the target. */
3767 size = expr_size (exp);
3768 if (GET_CODE (size) == CONST_INT
3769 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3770 emit_block_move (target, temp, size,
3771 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3772 else
3773 {
3774 /* Compute the size of the data to copy from the string. */
3775 tree copy_size
3776 = size_binop (MIN_EXPR,
3777 make_tree (sizetype, size),
3778 convert (sizetype,
3779 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3780 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3781 VOIDmode, 0);
3782 rtx label = 0;
3783
3784 /* Copy that much. */
3785 emit_block_move (target, temp, copy_size_rtx,
3786 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3787
3788 /* Figure out how much is left in TARGET that we have to clear.
3789 Do all calculations in ptr_mode. */
3790
3791 addr = XEXP (target, 0);
3792 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3793
3794 if (GET_CODE (copy_size_rtx) == CONST_INT)
3795 {
3796 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3797 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3798 }
3799 else
3800 {
3801 addr = force_reg (ptr_mode, addr);
3802 addr = expand_binop (ptr_mode, add_optab, addr,
3803 copy_size_rtx, NULL_RTX, 0,
3804 OPTAB_LIB_WIDEN);
3805
3806 size = expand_binop (ptr_mode, sub_optab, size,
3807 copy_size_rtx, NULL_RTX, 0,
3808 OPTAB_LIB_WIDEN);
3809
3810 label = gen_label_rtx ();
3811 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3812 GET_MODE (size), 0, 0, label);
3813 }
3814
3815 if (size != const0_rtx)
3816 {
3817 /* Be sure we can write on ADDR. */
3818 if (current_function_check_memory_usage)
3819 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3820 addr, Pmode,
3821 size, TYPE_MODE (sizetype),
3822 GEN_INT (MEMORY_USE_WO),
3823 TYPE_MODE (integer_type_node));
3824 #ifdef TARGET_MEM_FUNCTIONS
3825 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3826 addr, ptr_mode,
3827 const0_rtx, TYPE_MODE (integer_type_node),
3828 convert_to_mode (TYPE_MODE (sizetype),
3829 size,
3830 TREE_UNSIGNED (sizetype)),
3831 TYPE_MODE (sizetype));
3832 #else
3833 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3834 addr, ptr_mode,
3835 convert_to_mode (TYPE_MODE (integer_type_node),
3836 size,
3837 TREE_UNSIGNED (integer_type_node)),
3838 TYPE_MODE (integer_type_node));
3839 #endif
3840 }
3841
3842 if (label)
3843 emit_label (label);
3844 }
3845 }
3846 /* Handle calls that return values in multiple non-contiguous locations.
3847 The Irix 6 ABI has examples of this. */
3848 else if (GET_CODE (target) == PARALLEL)
3849 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3850 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3851 else if (GET_MODE (temp) == BLKmode)
3852 emit_block_move (target, temp, expr_size (exp),
3853 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3854 else
3855 emit_move_insn (target, temp);
3856 }
3857
3858 /* If we don't want a value, return NULL_RTX. */
3859 if (! want_value)
3860 return NULL_RTX;
3861
3862 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3863 ??? The latter test doesn't seem to make sense. */
3864 else if (dont_return_target && GET_CODE (temp) != MEM)
3865 return temp;
3866
3867 /* Return TARGET itself if it is a hard register. */
3868 else if (want_value && GET_MODE (target) != BLKmode
3869 && ! (GET_CODE (target) == REG
3870 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3871 return copy_to_reg (target);
3872
3873 else
3874 return target;
3875 }
3876 \f
3877 /* Return 1 if EXP just contains zeros. */
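/* For instance (hypothetical source), the CONSTRUCTOR for

	static int v[4] = { 0, 0, 0, 0 };

   is all zeros, while `{ 0, 1, 0, 0 }' is not; integer, real and complex
   constants are handled, and conversions such as a NOP_EXPR around a
   zero constant are looked through.  */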
3878
3879 static int
3880 is_zeros_p (exp)
3881 tree exp;
3882 {
3883 tree elt;
3884
3885 switch (TREE_CODE (exp))
3886 {
3887 case CONVERT_EXPR:
3888 case NOP_EXPR:
3889 case NON_LVALUE_EXPR:
3890 return is_zeros_p (TREE_OPERAND (exp, 0));
3891
3892 case INTEGER_CST:
3893 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3894
3895 case COMPLEX_CST:
3896 return
3897 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3898
3899 case REAL_CST:
3900 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3901
3902 case CONSTRUCTOR:
3903 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3904 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3905 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3906 if (! is_zeros_p (TREE_VALUE (elt)))
3907 return 0;
3908
3909 return 1;
3910
3911 default:
3912 return 0;
3913 }
3914 }
3915
3916 /* Return 1 if EXP contains mostly (3/4) zeros. */
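/* For instance (hypothetical source), the CONSTRUCTOR for

	int a[8] = { 0, 0, 7, 0, 0, 0, 0, 0 };

   has 7 zero elements out of 8, and 4*7 >= 3*8, so it counts as mostly
   zeros; with three nonzero elements (5 zeros, 4*5 < 3*8) it would not.  */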
3917
3918 static int
3919 mostly_zeros_p (exp)
3920 tree exp;
3921 {
3922 if (TREE_CODE (exp) == CONSTRUCTOR)
3923 {
3924 int elts = 0, zeros = 0;
3925 tree elt = CONSTRUCTOR_ELTS (exp);
3926 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3927 {
3928 /* If there are no ranges of true bits, it is all zero. */
3929 return elt == NULL_TREE;
3930 }
3931 for (; elt; elt = TREE_CHAIN (elt))
3932 {
3933 /* We do not handle the case where the index is a RANGE_EXPR,
3934 so the statistic will be somewhat inaccurate.
3935 We do make a more accurate count in store_constructor itself,
3936 and since this function is only used for nested array elements,
3937 this should be close enough. */
3938 if (mostly_zeros_p (TREE_VALUE (elt)))
3939 zeros++;
3940 elts++;
3941 }
3942
3943 return 4 * zeros >= 3 * elts;
3944 }
3945
3946 return is_zeros_p (exp);
3947 }
3948 \f
3949 /* Helper function for store_constructor.
3950 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3951 TYPE is the type of the CONSTRUCTOR, not the element type.
3952 ALIGN and CLEARED are as for store_constructor.
3953
3954 This provides a recursive shortcut back to store_constructor when it isn't
3955 necessary to go through store_field. This is so that we can pass through
3956 the cleared field to let store_constructor know that we may not have to
3957 clear a substructure if the outer structure has already been cleared. */
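/* For example (hypothetical source), when expanding

	struct outer { struct inner { int a, b; } i; int c; };
	struct outer x = { { 1, 2 }, 3 };

   the element for `i' is itself a CONSTRUCTOR located on a byte boundary,
   so we recurse directly into store_constructor for it, passing CLEARED
   along so an already-cleared inner structure is not cleared twice.  */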
3958
3959 static void
3960 store_constructor_field (target, bitsize, bitpos,
3961 mode, exp, type, align, cleared)
3962 rtx target;
3963 int bitsize, bitpos;
3964 enum machine_mode mode;
3965 tree exp, type;
3966 int align;
3967 int cleared;
3968 {
3969 if (TREE_CODE (exp) == CONSTRUCTOR
3970 && bitpos % BITS_PER_UNIT == 0
3971 /* If we have a non-zero bitpos for a register target, then we just
3972 let store_field do the bitfield handling. This is unlikely to
3973 generate unnecessary clear instructions anyway. */
3974 && (bitpos == 0 || GET_CODE (target) == MEM))
3975 {
3976 if (bitpos != 0)
3977 target = change_address (target, VOIDmode,
3978 plus_constant (XEXP (target, 0),
3979 bitpos / BITS_PER_UNIT));
3980 store_constructor (exp, target, align, cleared);
3981 }
3982 else
3983 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
3984 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
3985 int_size_in_bytes (type), cleared);
3986 }
3987
3988 /* Store the value of constructor EXP into the rtx TARGET.
3989 TARGET is either a REG or a MEM.
3990 ALIGN is the maximum known alignment for TARGET, in bits.
3991 CLEARED is true if TARGET is known to have been zero'd. */
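/* For example (hypothetical source), an initializer with few nonzero
   elements such as

	int a[100] = { 1, 2 };

   is expanded by clearing the whole array with clear_storage and then
   storing only the first two elements, rather than emitting one hundred
   separate element stores.  */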
3992
3993 static void
3994 store_constructor (exp, target, align, cleared)
3995 tree exp;
3996 rtx target;
3997 int align;
3998 int cleared;
3999 {
4000 tree type = TREE_TYPE (exp);
4001 #ifdef WORD_REGISTER_OPERATIONS
4002 rtx exp_size = expr_size (exp);
4003 #endif
4004
4005 /* We know our target cannot conflict, since safe_from_p has been called. */
4006 #if 0
4007 /* Don't try copying piece by piece into a hard register
4008 since that is vulnerable to being clobbered by EXP.
4009 Instead, construct in a pseudo register and then copy it all. */
4010 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4011 {
4012 rtx temp = gen_reg_rtx (GET_MODE (target));
4013 store_constructor (exp, temp, 0);
4014 emit_move_insn (target, temp);
4015 return;
4016 }
4017 #endif
4018
4019 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4020 || TREE_CODE (type) == QUAL_UNION_TYPE)
4021 {
4022 register tree elt;
4023
4024 /* Inform later passes that the whole union value is dead. */
4025 if (TREE_CODE (type) == UNION_TYPE
4026 || TREE_CODE (type) == QUAL_UNION_TYPE)
4027 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4028
4029 /* If we are building a static constructor into a register,
4030 set the initial value as zero so we can fold the value into
4031 a constant. But if more than one register is involved,
4032 this probably loses. */
4033 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4034 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4035 {
4036 if (! cleared)
4037 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4038
4039 cleared = 1;
4040 }
4041
4042 /* If the constructor has fewer fields than the structure
4043 or if we are initializing the structure to mostly zeros,
4044 clear the whole structure first. */
4045 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4046 != list_length (TYPE_FIELDS (type)))
4047 || mostly_zeros_p (exp))
4048 {
4049 if (! cleared)
4050 clear_storage (target, expr_size (exp),
4051 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4052
4053 cleared = 1;
4054 }
4055 else
4056 /* Inform later passes that the old value is dead. */
4057 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4058
4059 /* Store each element of the constructor into
4060 the corresponding field of TARGET. */
4061
4062 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4063 {
4064 register tree field = TREE_PURPOSE (elt);
4065 #ifdef WORD_REGISTER_OPERATIONS
4066 tree value = TREE_VALUE (elt);
4067 #endif
4068 register enum machine_mode mode;
4069 int bitsize;
4070 int bitpos = 0;
4071 int unsignedp;
4072 tree pos, constant = 0, offset = 0;
4073 rtx to_rtx = target;
4074
4075 /* Just ignore missing fields.
4076 We cleared the whole structure, above,
4077 if any fields are missing. */
4078 if (field == 0)
4079 continue;
4080
4081 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4082 continue;
4083
4084 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4085 unsignedp = TREE_UNSIGNED (field);
4086 mode = DECL_MODE (field);
4087 if (DECL_BIT_FIELD (field))
4088 mode = VOIDmode;
4089
4090 pos = DECL_FIELD_BITPOS (field);
4091 if (TREE_CODE (pos) == INTEGER_CST)
4092 constant = pos;
4093 else if (TREE_CODE (pos) == PLUS_EXPR
4094 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4095 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4096 else
4097 offset = pos;
4098
4099 if (constant)
4100 bitpos = TREE_INT_CST_LOW (constant);
4101
4102 if (offset)
4103 {
4104 rtx offset_rtx;
4105
4106 if (contains_placeholder_p (offset))
4107 offset = build (WITH_RECORD_EXPR, sizetype,
4108 offset, make_tree (TREE_TYPE (exp), target));
4109
4110 offset = size_binop (FLOOR_DIV_EXPR, offset,
4111 size_int (BITS_PER_UNIT));
4112
4113 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4114 if (GET_CODE (to_rtx) != MEM)
4115 abort ();
4116
4117 if (GET_MODE (offset_rtx) != ptr_mode)
4118 {
4119 #ifdef POINTERS_EXTEND_UNSIGNED
4120 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4121 #else
4122 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4123 #endif
4124 }
4125
4126 to_rtx
4127 = change_address (to_rtx, VOIDmode,
4128 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4129 force_reg (ptr_mode,
4130 offset_rtx)));
4131 }
4132
4133 if (TREE_READONLY (field))
4134 {
4135 if (GET_CODE (to_rtx) == MEM)
4136 to_rtx = copy_rtx (to_rtx);
4137
4138 RTX_UNCHANGING_P (to_rtx) = 1;
4139 }
4140
4141 #ifdef WORD_REGISTER_OPERATIONS
4142 /* If this initializes a field that is smaller than a word, at the
4143 start of a word, try to widen it to a full word.
4144 This special case allows us to output C++ member function
4145 initializations in a form that the optimizers can understand. */
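	  /* Illustrative case (hypothetical source): for a structure such as

		struct s { short a; short b; };

	     built in a register with `a' initialized to a constant, the
	     16-bit store at bit 0 is widened below to a full-word store
	     (shifted into place on big-endian targets), which the
	     optimizers handle better than a narrow bit-field insertion.  */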
4146 if (constant
4147 && GET_CODE (target) == REG
4148 && bitsize < BITS_PER_WORD
4149 && bitpos % BITS_PER_WORD == 0
4150 && GET_MODE_CLASS (mode) == MODE_INT
4151 && TREE_CODE (value) == INTEGER_CST
4152 && GET_CODE (exp_size) == CONST_INT
4153 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4154 {
4155 tree type = TREE_TYPE (value);
4156 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4157 {
4158 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4159 value = convert (type, value);
4160 }
4161 if (BYTES_BIG_ENDIAN)
4162 value
4163 = fold (build (LSHIFT_EXPR, type, value,
4164 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4165 bitsize = BITS_PER_WORD;
4166 mode = word_mode;
4167 }
4168 #endif
4169 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4170 TREE_VALUE (elt), type,
4171 MIN (align,
4172 DECL_ALIGN (TREE_PURPOSE (elt))),
4173 cleared);
4174 }
4175 }
4176 else if (TREE_CODE (type) == ARRAY_TYPE)
4177 {
4178 register tree elt;
4179 register int i;
4180 int need_to_clear;
4181 tree domain = TYPE_DOMAIN (type);
4182 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4183 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4184 tree elttype = TREE_TYPE (type);
4185
4186 /* If the constructor has fewer elements than the array,
4187 clear the whole array first. Similarly if this is
4188 a static constructor of a non-BLKmode object. */
4189 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4190 need_to_clear = 1;
4191 else
4192 {
4193 HOST_WIDE_INT count = 0, zero_count = 0;
4194 need_to_clear = 0;
4195 /* This loop is a more accurate version of the loop in
4196 mostly_zeros_p (it handles RANGE_EXPR in an index).
4197 It is also needed to check for missing elements. */
4198 for (elt = CONSTRUCTOR_ELTS (exp);
4199 elt != NULL_TREE;
4200 elt = TREE_CHAIN (elt))
4201 {
4202 tree index = TREE_PURPOSE (elt);
4203 HOST_WIDE_INT this_node_count;
4204 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4205 {
4206 tree lo_index = TREE_OPERAND (index, 0);
4207 tree hi_index = TREE_OPERAND (index, 1);
4208 if (TREE_CODE (lo_index) != INTEGER_CST
4209 || TREE_CODE (hi_index) != INTEGER_CST)
4210 {
4211 need_to_clear = 1;
4212 break;
4213 }
4214 this_node_count = TREE_INT_CST_LOW (hi_index)
4215 - TREE_INT_CST_LOW (lo_index) + 1;
4216 }
4217 else
4218 this_node_count = 1;
4219 count += this_node_count;
4220 if (mostly_zeros_p (TREE_VALUE (elt)))
4221 zero_count += this_node_count;
4222 }
4223 /* Clear the entire array first if there are any missing elements,
4224 or if the incidence of zero elements is >= 75%. */
4225 if (count < maxelt - minelt + 1
4226 || 4 * zero_count >= 3 * count)
4227 need_to_clear = 1;
4228 }
4229 if (need_to_clear)
4230 {
4231 if (! cleared)
4232 clear_storage (target, expr_size (exp),
4233 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4234 cleared = 1;
4235 }
4236 else
4237 /* Inform later passes that the old value is dead. */
4238 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4239
4240 /* Store each element of the constructor into
4241 the corresponding element of TARGET, determined
4242 by counting the elements. */
4243 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4244 elt;
4245 elt = TREE_CHAIN (elt), i++)
4246 {
4247 register enum machine_mode mode;
4248 int bitsize;
4249 int bitpos;
4250 int unsignedp;
4251 tree value = TREE_VALUE (elt);
4252 int align = TYPE_ALIGN (TREE_TYPE (value));
4253 tree index = TREE_PURPOSE (elt);
4254 rtx xtarget = target;
4255
4256 if (cleared && is_zeros_p (value))
4257 continue;
4258
4259 mode = TYPE_MODE (elttype);
4260 bitsize = GET_MODE_BITSIZE (mode);
4261 unsignedp = TREE_UNSIGNED (elttype);
4262
4263 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4264 {
4265 tree lo_index = TREE_OPERAND (index, 0);
4266 tree hi_index = TREE_OPERAND (index, 1);
4267 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4268 struct nesting *loop;
4269 HOST_WIDE_INT lo, hi, count;
4270 tree position;
4271
4272 /* If the range is constant and "small", unroll the loop. */
4273 if (TREE_CODE (lo_index) == INTEGER_CST
4274 && TREE_CODE (hi_index) == INTEGER_CST
4275 && (lo = TREE_INT_CST_LOW (lo_index),
4276 hi = TREE_INT_CST_LOW (hi_index),
4277 count = hi - lo + 1,
4278 (GET_CODE (target) != MEM
4279 || count <= 2
4280 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4281 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4282 <= 40 * 8))))
4283 {
4284 lo -= minelt; hi -= minelt;
4285 for (; lo <= hi; lo++)
4286 {
4287 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4288 store_constructor_field (target, bitsize, bitpos, mode,
4289 value, type, align, cleared);
4290 }
4291 }
4292 else
4293 {
4294 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4295 loop_top = gen_label_rtx ();
4296 loop_end = gen_label_rtx ();
4297
4298 unsignedp = TREE_UNSIGNED (domain);
4299
4300 index = build_decl (VAR_DECL, NULL_TREE, domain);
4301
4302 DECL_RTL (index) = index_r
4303 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4304 &unsignedp, 0));
4305
4306 if (TREE_CODE (value) == SAVE_EXPR
4307 && SAVE_EXPR_RTL (value) == 0)
4308 {
4309 /* Make sure value gets expanded once before the
4310 loop. */
4311 expand_expr (value, const0_rtx, VOIDmode, 0);
4312 emit_queue ();
4313 }
4314 store_expr (lo_index, index_r, 0);
4315 loop = expand_start_loop (0);
4316
4317 /* Assign value to element index. */
4318 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4319 size_int (BITS_PER_UNIT));
4320 position = size_binop (MULT_EXPR,
4321 size_binop (MINUS_EXPR, index,
4322 TYPE_MIN_VALUE (domain)),
4323 position);
4324 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4325 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4326 xtarget = change_address (target, mode, addr);
4327 if (TREE_CODE (value) == CONSTRUCTOR)
4328 store_constructor (value, xtarget, align, cleared);
4329 else
4330 store_expr (value, xtarget, 0);
4331
4332 expand_exit_loop_if_false (loop,
4333 build (LT_EXPR, integer_type_node,
4334 index, hi_index));
4335
4336 expand_increment (build (PREINCREMENT_EXPR,
4337 TREE_TYPE (index),
4338 index, integer_one_node), 0, 0);
4339 expand_end_loop ();
4340 emit_label (loop_end);
4341
4342 /* Needed by stupid register allocation, to extend the
4343 lifetime of pseudo-regs used by target past the end
4344 of the loop. */
4345 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4346 }
4347 }
4348 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4349 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4350 {
4351 rtx pos_rtx, addr;
4352 tree position;
4353
4354 if (index == 0)
4355 index = size_int (i);
4356
4357 if (minelt)
4358 index = size_binop (MINUS_EXPR, index,
4359 TYPE_MIN_VALUE (domain));
4360 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4361 size_int (BITS_PER_UNIT));
4362 position = size_binop (MULT_EXPR, index, position);
4363 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4364 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4365 xtarget = change_address (target, mode, addr);
4366 store_expr (value, xtarget, 0);
4367 }
4368 else
4369 {
4370 if (index != 0)
4371 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4372 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4373 else
4374 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4375 store_constructor_field (target, bitsize, bitpos, mode, value,
4376 type, align, cleared);
4377 }
4378 }
4379 }
4380 /* set constructor assignments */
4381 else if (TREE_CODE (type) == SET_TYPE)
4382 {
4383 tree elt = CONSTRUCTOR_ELTS (exp);
4384 int nbytes = int_size_in_bytes (type), nbits;
4385 tree domain = TYPE_DOMAIN (type);
4386 tree domain_min, domain_max, bitlength;
4387
4388 /* The default implementation strategy is to extract the constant
4389 parts of the constructor, use that to initialize the target,
4390 and then "or" in whatever non-constant ranges we need in addition.
4391
4392 If a large set is all zero or all ones, it is
4393 probably better to set it using memset (if available) or bzero.
4394 Also, if a large set has just a single range, it may be
4395 better to first clear the set (using bzero/memset), and then
4396 set the bits we want. */
4397
4398 /* Check for all zeros. */
4399 if (elt == NULL_TREE)
4400 {
4401 if (!cleared)
4402 clear_storage (target, expr_size (exp),
4403 TYPE_ALIGN (type) / BITS_PER_UNIT);
4404 return;
4405 }
4406
4407 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4408 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4409 bitlength = size_binop (PLUS_EXPR,
4410 size_binop (MINUS_EXPR, domain_max, domain_min),
4411 size_one_node);
4412
4413 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4414 abort ();
4415 nbits = TREE_INT_CST_LOW (bitlength);
4416
4417 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4418 are "complicated" (more than one range), initialize (the
4419 constant parts) by copying from a constant. */
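	  /* Worked example (assuming set_word_size == 32 on a
	     little-endian target): if bits 0, 1 and 5 of the set are true,
	     the loop below accumulates
	     word = (1 << 0) | (1 << 1) | (1 << 5) = 0x23
	     and emits a single move of that constant into the first word
	     of the set.  */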
4420 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4421 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4422 {
4423 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4424 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4425 char *bit_buffer = (char *) alloca (nbits);
4426 HOST_WIDE_INT word = 0;
4427 int bit_pos = 0;
4428 int ibit = 0;
4429 int offset = 0; /* In bytes from beginning of set. */
4430 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4431 for (;;)
4432 {
4433 if (bit_buffer[ibit])
4434 {
4435 if (BYTES_BIG_ENDIAN)
4436 word |= (1 << (set_word_size - 1 - bit_pos));
4437 else
4438 word |= 1 << bit_pos;
4439 }
4440 bit_pos++; ibit++;
4441 if (bit_pos >= set_word_size || ibit == nbits)
4442 {
4443 if (word != 0 || ! cleared)
4444 {
4445 rtx datum = GEN_INT (word);
4446 rtx to_rtx;
4447 /* The assumption here is that it is safe to use
4448 XEXP if the set is multi-word, but not if
4449 it's single-word. */
4450 if (GET_CODE (target) == MEM)
4451 {
4452 to_rtx = plus_constant (XEXP (target, 0), offset);
4453 to_rtx = change_address (target, mode, to_rtx);
4454 }
4455 else if (offset == 0)
4456 to_rtx = target;
4457 else
4458 abort ();
4459 emit_move_insn (to_rtx, datum);
4460 }
4461 if (ibit == nbits)
4462 break;
4463 word = 0;
4464 bit_pos = 0;
4465 offset += set_word_size / BITS_PER_UNIT;
4466 }
4467 }
4468 }
4469 else if (!cleared)
4470 {
4471 /* Don't bother clearing storage if the set is all ones. */
4472 if (TREE_CHAIN (elt) != NULL_TREE
4473 || (TREE_PURPOSE (elt) == NULL_TREE
4474 ? nbits != 1
4475 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4476 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4477 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4478 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4479 != nbits))))
4480 clear_storage (target, expr_size (exp),
4481 TYPE_ALIGN (type) / BITS_PER_UNIT);
4482 }
4483
4484 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4485 {
4486 /* start of range of element or NULL */
4487 tree startbit = TREE_PURPOSE (elt);
4488 /* end of range of element, or element value */
4489 tree endbit = TREE_VALUE (elt);
4490 #ifdef TARGET_MEM_FUNCTIONS
4491 HOST_WIDE_INT startb, endb;
4492 #endif
4493 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4494
4495 bitlength_rtx = expand_expr (bitlength,
4496 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4497
4498 /* handle non-range tuple element like [ expr ] */
4499 if (startbit == NULL_TREE)
4500 {
4501 startbit = save_expr (endbit);
4502 endbit = startbit;
4503 }
4504 startbit = convert (sizetype, startbit);
4505 endbit = convert (sizetype, endbit);
4506 if (! integer_zerop (domain_min))
4507 {
4508 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4509 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4510 }
4511 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4512 EXPAND_CONST_ADDRESS);
4513 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4514 EXPAND_CONST_ADDRESS);
4515
4516 if (REG_P (target))
4517 {
4518 targetx = assign_stack_temp (GET_MODE (target),
4519 GET_MODE_SIZE (GET_MODE (target)),
4520 0);
4521 emit_move_insn (targetx, target);
4522 }
4523 else if (GET_CODE (target) == MEM)
4524 targetx = target;
4525 else
4526 abort ();
4527
4528 #ifdef TARGET_MEM_FUNCTIONS
4529 /* Optimization: If startbit and endbit are
4530 constants divisible by BITS_PER_UNIT,
4531 call memset instead. */
4532 if (TREE_CODE (startbit) == INTEGER_CST
4533 && TREE_CODE (endbit) == INTEGER_CST
4534 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4535 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4536 {
4537 emit_library_call (memset_libfunc, 0,
4538 VOIDmode, 3,
4539 plus_constant (XEXP (targetx, 0),
4540 startb / BITS_PER_UNIT),
4541 Pmode,
4542 constm1_rtx, TYPE_MODE (integer_type_node),
4543 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4544 TYPE_MODE (sizetype));
4545 }
4546 else
4547 #endif
4548 {
4549 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4550 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4551 bitlength_rtx, TYPE_MODE (sizetype),
4552 startbit_rtx, TYPE_MODE (sizetype),
4553 endbit_rtx, TYPE_MODE (sizetype));
4554 }
4555 if (REG_P (target))
4556 emit_move_insn (target, targetx);
4557 }
4558 }
4559
4560 else
4561 abort ();
4562 }
4563
4564 /* Store the value of EXP (an expression tree)
4565 into a subfield of TARGET which has mode MODE and occupies
4566 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4567 If MODE is VOIDmode, it means that we are storing into a bit-field.
4568
4569 If VALUE_MODE is VOIDmode, return nothing in particular.
4570 UNSIGNEDP is not used in this case.
4571
4572 Otherwise, return an rtx for the value stored. This rtx
4573 has mode VALUE_MODE if that is convenient to do.
4574 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4575
4576 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4577 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4578
4579 ALIAS_SET is the alias set for the destination. This value will
4580 (in general) be different from that for TARGET, since TARGET is a
4581 reference to the containing structure. */
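/* For example (hypothetical source), an assignment to a bit-field member

	struct s { unsigned int f : 3; } x;
	x.f = 5;

   reaches store_field with MODE == VOIDmode, BITSIZE == 3 and BITPOS
   giving the field's position, and is carried out with store_bit_field;
   an ordinary aligned `int' member would instead be stored through a
   plain MEM built by change_address.  */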
4582
4583 static rtx
4584 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4585 unsignedp, align, total_size, alias_set)
4586 rtx target;
4587 int bitsize, bitpos;
4588 enum machine_mode mode;
4589 tree exp;
4590 enum machine_mode value_mode;
4591 int unsignedp;
4592 int align;
4593 int total_size;
4594 int alias_set;
4595 {
4596 HOST_WIDE_INT width_mask = 0;
4597
4598 if (TREE_CODE (exp) == ERROR_MARK)
4599 return const0_rtx;
4600
4601 if (bitsize < HOST_BITS_PER_WIDE_INT)
4602 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4603
4604 /* If we are storing into an unaligned field of an aligned union that is
4605 in a register, we may have the mode of TARGET being an integer mode but
4606 MODE == BLKmode. In that case, get an aligned object whose size and
4607 alignment are the same as TARGET and store TARGET into it (we can avoid
4608 the store if the field being stored is the entire width of TARGET). Then
4609 call ourselves recursively to store the field into a BLKmode version of
4610 that object. Finally, load from the object into TARGET. This is not
4611 very efficient in general, but should only be slightly more expensive
4612 than the otherwise-required unaligned accesses. Perhaps this can be
4613 cleaned up later. */
4614
4615 if (mode == BLKmode
4616 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4617 {
4618 rtx object = assign_stack_temp (GET_MODE (target),
4619 GET_MODE_SIZE (GET_MODE (target)), 0);
4620 rtx blk_object = copy_rtx (object);
4621
4622 MEM_SET_IN_STRUCT_P (object, 1);
4623 MEM_SET_IN_STRUCT_P (blk_object, 1);
4624 PUT_MODE (blk_object, BLKmode);
4625
4626 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4627 emit_move_insn (object, target);
4628
4629 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4630 align, total_size, alias_set);
4631
4632 /* Even though we aren't returning target, we need to
4633 give it the updated value. */
4634 emit_move_insn (target, object);
4635
4636 return blk_object;
4637 }
4638
4639 /* If the structure is in a register or if the component
4640 is a bit field, we cannot use addressing to access it.
4641 Use bit-field techniques or SUBREG to store in it. */
4642
4643 if (mode == VOIDmode
4644 || (mode != BLKmode && ! direct_store[(int) mode]
4645 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4646 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4647 || GET_CODE (target) == REG
4648 || GET_CODE (target) == SUBREG
4649 /* If the field isn't aligned enough to store as an ordinary memref,
4650 store it as a bit field. */
4651 || (SLOW_UNALIGNED_ACCESS
4652 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4653 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4654 {
4655 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4656
4657 /* If BITSIZE is narrower than the size of the type of EXP
4658 we will be narrowing TEMP. Normally, what's wanted are the
4659 low-order bits. However, if EXP's type is a record and this is
4660 a big-endian machine, we want the upper BITSIZE bits. */
4661 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4662 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4663 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4664 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4665 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4666 - bitsize),
4667 temp, 1);
4668
4669 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4670 MODE. */
4671 if (mode != VOIDmode && mode != BLKmode
4672 && mode != TYPE_MODE (TREE_TYPE (exp)))
4673 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4674
4675 /* If the modes of TARGET and TEMP are both BLKmode, both
4676 must be in memory and BITPOS must be aligned on a byte
4677 boundary. If so, we simply do a block copy. */
4678 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4679 {
4680 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4681 || bitpos % BITS_PER_UNIT != 0)
4682 abort ();
4683
4684 target = change_address (target, VOIDmode,
4685 plus_constant (XEXP (target, 0),
4686 bitpos / BITS_PER_UNIT));
4687
4688 emit_block_move (target, temp,
4689 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4690 / BITS_PER_UNIT),
4691 1);
4692
4693 return value_mode == VOIDmode ? const0_rtx : target;
4694 }
4695
4696 /* Store the value in the bitfield. */
4697 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4698 if (value_mode != VOIDmode)
4699 {
4700 /* The caller wants an rtx for the value. */
4701 /* If possible, avoid refetching from the bitfield itself. */
4702 if (width_mask != 0
4703 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4704 {
4705 tree count;
4706 enum machine_mode tmode;
4707
4708 if (unsignedp)
4709 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4710 tmode = GET_MODE (temp);
4711 if (tmode == VOIDmode)
4712 tmode = value_mode;
4713 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4714 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4715 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4716 }
4717 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4718 NULL_RTX, value_mode, 0, align,
4719 total_size);
4720 }
4721 return const0_rtx;
4722 }
4723 else
4724 {
4725 rtx addr = XEXP (target, 0);
4726 rtx to_rtx;
4727
4728 /* If a value is wanted, it must be the lhs;
4729 so make the address stable for multiple use. */
4730
4731 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4732 && ! CONSTANT_ADDRESS_P (addr)
4733 /* A frame-pointer reference is already stable. */
4734 && ! (GET_CODE (addr) == PLUS
4735 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4736 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4737 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4738 addr = copy_to_reg (addr);
4739
4740 /* Now build a reference to just the desired component. */
4741
4742 to_rtx = copy_rtx (change_address (target, mode,
4743 plus_constant (addr,
4744 (bitpos
4745 / BITS_PER_UNIT))));
4746 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4747 MEM_ALIAS_SET (to_rtx) = alias_set;
4748
4749 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4750 }
4751 }
4752 \f
4753 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4754 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4755 ARRAY_REFs and find the ultimate containing object, which we return.
4756
4757 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4758 bit position, and *PUNSIGNEDP to the signedness of the field.
4759 If the position of the field is variable, we store a tree
4760 giving the variable offset (in units) in *POFFSET.
4761 This offset is in addition to the bit position.
4762 If the position is not variable, we store 0 in *POFFSET.
4763 We set *PALIGNMENT to the alignment in bytes of the address that will be
4764 computed. This is the alignment of the thing we return if *POFFSET
4765 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4766
4767 If any of the extraction expressions is volatile,
4768 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4769
4770 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4771 is a mode that can be used to access the field. In that case, *PBITSIZE
4772 is redundant.
4773
4774 If the field describes a variable-sized object, *PMODE is set to
4775 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4776 this case, but the address of the object can be found. */
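/* For example (hypothetical source), given the reference p->a[3] in

	struct s { int pad; int a[10]; } *p;

   get_inner_reference applied to the COMPONENT_REF/ARRAY_REF tree
   returns the INDIRECT_REF for `*p' as the containing object, with
   *PBITPOS set to the constant bit offset of a[3] within the structure
   and *POFFSET left 0 since the index is constant.  */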
4777
4778 tree
4779 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4780 punsignedp, pvolatilep, palignment)
4781 tree exp;
4782 int *pbitsize;
4783 int *pbitpos;
4784 tree *poffset;
4785 enum machine_mode *pmode;
4786 int *punsignedp;
4787 int *pvolatilep;
4788 int *palignment;
4789 {
4790 tree orig_exp = exp;
4791 tree size_tree = 0;
4792 enum machine_mode mode = VOIDmode;
4793 tree offset = integer_zero_node;
4794 unsigned int alignment = BIGGEST_ALIGNMENT;
4795
4796 if (TREE_CODE (exp) == COMPONENT_REF)
4797 {
4798 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4799 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4800 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4801 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4802 }
4803 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4804 {
4805 size_tree = TREE_OPERAND (exp, 1);
4806 *punsignedp = TREE_UNSIGNED (exp);
4807 }
4808 else
4809 {
4810 mode = TYPE_MODE (TREE_TYPE (exp));
4811 if (mode == BLKmode)
4812 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4813
4814 *pbitsize = GET_MODE_BITSIZE (mode);
4815 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4816 }
4817
4818 if (size_tree)
4819 {
4820 if (TREE_CODE (size_tree) != INTEGER_CST)
4821 mode = BLKmode, *pbitsize = -1;
4822 else
4823 *pbitsize = TREE_INT_CST_LOW (size_tree);
4824 }
4825
4826 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4827 and find the ultimate containing object. */
4828
4829 *pbitpos = 0;
4830
4831 while (1)
4832 {
4833 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4834 {
4835 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4836 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4837 : TREE_OPERAND (exp, 2));
4838 tree constant = integer_zero_node, var = pos;
4839
4840 /* If this field hasn't been filled in yet, don't go
4841 past it. This should only happen when folding expressions
4842 made during type construction. */
4843 if (pos == 0)
4844 break;
4845
4846 /* Assume here that the offset is a multiple of a unit.
4847 If not, there should be an explicitly added constant. */
4848 if (TREE_CODE (pos) == PLUS_EXPR
4849 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4850 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4851 else if (TREE_CODE (pos) == INTEGER_CST)
4852 constant = pos, var = integer_zero_node;
4853
4854 *pbitpos += TREE_INT_CST_LOW (constant);
4855 offset = size_binop (PLUS_EXPR, offset,
4856 size_binop (EXACT_DIV_EXPR, var,
4857 size_int (BITS_PER_UNIT)));
4858 }
4859
4860 else if (TREE_CODE (exp) == ARRAY_REF)
4861 {
4862 /* This code is based on the code in case ARRAY_REF in expand_expr
4863 below. We assume here that the size of an array element is
4864 always an integral multiple of BITS_PER_UNIT. */
4865
4866 tree index = TREE_OPERAND (exp, 1);
4867 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4868 tree low_bound
4869 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4870 tree index_type = TREE_TYPE (index);
4871 tree xindex;
4872
4873 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4874 {
4875 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4876 index);
4877 index_type = TREE_TYPE (index);
4878 }
4879
4880 /* Optimize the special-case of a zero lower bound.
4881
4882 We convert the low_bound to sizetype to avoid some problems
4883 with constant folding. (E.g. suppose the lower bound is 1,
4884 and its mode is QI. Without the conversion, (ARRAY
4885 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4886 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4887
4888 But sizetype isn't quite right either (especially if
4889 the lowbound is negative). FIXME */
4890
4891 if (! integer_zerop (low_bound))
4892 index = fold (build (MINUS_EXPR, index_type, index,
4893 convert (sizetype, low_bound)));
4894
4895 if (TREE_CODE (index) == INTEGER_CST)
4896 {
4897 index = convert (sbitsizetype, index);
4898 index_type = TREE_TYPE (index);
4899 }
4900
4901 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4902 convert (sbitsizetype,
4903 TYPE_SIZE (TREE_TYPE (exp)))));
4904
4905 if (TREE_CODE (xindex) == INTEGER_CST
4906 && TREE_INT_CST_HIGH (xindex) == 0)
4907 *pbitpos += TREE_INT_CST_LOW (xindex);
4908 else
4909 {
4910 /* Either the bit offset calculated above is not constant, or
4911 it overflowed. In either case, redo the multiplication
4912 against the size in units. This is especially important
4913 in the non-constant case to avoid a division at runtime. */
4914 xindex = fold (build (MULT_EXPR, ssizetype, index,
4915 convert (ssizetype,
4916 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4917
4918 if (contains_placeholder_p (xindex))
4919 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4920
4921 offset = size_binop (PLUS_EXPR, offset, xindex);
4922 }
4923 }
4924 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4925 && ! ((TREE_CODE (exp) == NOP_EXPR
4926 || TREE_CODE (exp) == CONVERT_EXPR)
4927 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4928 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4929 != UNION_TYPE))
4930 && (TYPE_MODE (TREE_TYPE (exp))
4931 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4932 break;
4933
4934 /* If any reference in the chain is volatile, the effect is volatile. */
4935 if (TREE_THIS_VOLATILE (exp))
4936 *pvolatilep = 1;
4937
4938 /* If the offset is non-constant already, then we can't assume any
4939 alignment more than the alignment here. */
4940 if (! integer_zerop (offset))
4941 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4942
4943 exp = TREE_OPERAND (exp, 0);
4944 }
4945
4946 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4947 alignment = MIN (alignment, DECL_ALIGN (exp));
4948 else if (TREE_TYPE (exp) != 0)
4949 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4950
4951 if (integer_zerop (offset))
4952 offset = 0;
4953
4954 if (offset != 0 && contains_placeholder_p (offset))
4955 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4956
4957 *pmode = mode;
4958 *poffset = offset;
4959 *palignment = alignment / BITS_PER_UNIT;
4960 return exp;
4961 }
4962
4963 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4964 static enum memory_use_mode
4965 get_memory_usage_from_modifier (modifier)
4966 enum expand_modifier modifier;
4967 {
4968 switch (modifier)
4969 {
4970 case EXPAND_NORMAL:
4971 case EXPAND_SUM:
4972 return MEMORY_USE_RO;
4973 break;
4974 case EXPAND_MEMORY_USE_WO:
4975 return MEMORY_USE_WO;
4976 break;
4977 case EXPAND_MEMORY_USE_RW:
4978 return MEMORY_USE_RW;
4979 break;
4980 case EXPAND_MEMORY_USE_DONT:
4981 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4982 MEMORY_USE_DONT, because they are modifiers to a call of
4983 expand_expr in the ADDR_EXPR case of expand_expr. */
4984 case EXPAND_CONST_ADDRESS:
4985 case EXPAND_INITIALIZER:
4986 return MEMORY_USE_DONT;
4987 case EXPAND_MEMORY_USE_BAD:
4988 default:
4989 abort ();
4990 }
4991 }
4992 \f
4993 /* Given an rtx VALUE that may contain additions and multiplications,
4994 return an equivalent value that just refers to a register or memory.
4995 This is done by generating instructions to perform the arithmetic
4996 and returning a pseudo-register containing the value.
4997
4998 The returned value may be a REG, SUBREG, MEM or constant. */
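/* A minimal illustration: given an address-like value such as

	(plus:SI (reg:SI 100) (const_int 4))

   force_operand emits an addition (via expand_binop) and returns the
   register holding the sum, so callers can use the result where only a
   register or memory operand is allowed.  */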
4999
5000 rtx
5001 force_operand (value, target)
5002 rtx value, target;
5003 {
5004 register optab binoptab = 0;
5005 /* Use a temporary to force order of execution of calls to
5006 `force_operand'. */
5007 rtx tmp;
5008 register rtx op2;
5009 /* Use subtarget as the target for operand 0 of a binary operation. */
5010 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5011
5012 /* Check for a PIC address load. */
5013 if (flag_pic
5014 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5015 && XEXP (value, 0) == pic_offset_table_rtx
5016 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5017 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5018 || GET_CODE (XEXP (value, 1)) == CONST))
5019 {
5020 if (!subtarget)
5021 subtarget = gen_reg_rtx (GET_MODE (value));
5022 emit_move_insn (subtarget, value);
5023 return subtarget;
5024 }
5025
5026 if (GET_CODE (value) == PLUS)
5027 binoptab = add_optab;
5028 else if (GET_CODE (value) == MINUS)
5029 binoptab = sub_optab;
5030 else if (GET_CODE (value) == MULT)
5031 {
5032 op2 = XEXP (value, 1);
5033 if (!CONSTANT_P (op2)
5034 && !(GET_CODE (op2) == REG && op2 != subtarget))
5035 subtarget = 0;
5036 tmp = force_operand (XEXP (value, 0), subtarget);
5037 return expand_mult (GET_MODE (value), tmp,
5038 force_operand (op2, NULL_RTX),
5039 target, 0);
5040 }
5041
5042 if (binoptab)
5043 {
5044 op2 = XEXP (value, 1);
5045 if (!CONSTANT_P (op2)
5046 && !(GET_CODE (op2) == REG && op2 != subtarget))
5047 subtarget = 0;
5048 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5049 {
5050 binoptab = add_optab;
5051 op2 = negate_rtx (GET_MODE (value), op2);
5052 }
5053
5054 /* Check for an addition with OP2 a constant integer and our first
5055 operand a PLUS of a virtual register and something else. In that
5056 case, we want to emit the sum of the virtual register and the
5057 constant first and then add the other value. This allows virtual
5058 register instantiation to simply modify the constant rather than
5059 creating another one around this addition. */
5060 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5061 && GET_CODE (XEXP (value, 0)) == PLUS
5062 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5063 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5064 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5065 {
5066 rtx temp = expand_binop (GET_MODE (value), binoptab,
5067 XEXP (XEXP (value, 0), 0), op2,
5068 subtarget, 0, OPTAB_LIB_WIDEN);
5069 return expand_binop (GET_MODE (value), binoptab, temp,
5070 force_operand (XEXP (XEXP (value, 0), 1), 0),
5071 target, 0, OPTAB_LIB_WIDEN);
5072 }
5073
5074 tmp = force_operand (XEXP (value, 0), subtarget);
5075 return expand_binop (GET_MODE (value), binoptab, tmp,
5076 force_operand (op2, NULL_RTX),
5077 target, 0, OPTAB_LIB_WIDEN);
5078 /* We give UNSIGNEDP = 0 to expand_binop
5079 because the only operations we are expanding here are signed ones. */
5080 }
5081 return value;
5082 }
5083 \f
5084 /* Subroutine of expand_expr:
5085 save the non-copied parts (LIST) of an expr (LHS), and return a list
5086 which can restore these values to their previous values,
5087 should something modify their storage. */
5088
5089 static tree
5090 save_noncopied_parts (lhs, list)
5091 tree lhs;
5092 tree list;
5093 {
5094 tree tail;
5095 tree parts = 0;
5096
5097 for (tail = list; tail; tail = TREE_CHAIN (tail))
5098 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5099 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5100 else
5101 {
5102 tree part = TREE_VALUE (tail);
5103 tree part_type = TREE_TYPE (part);
5104 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5105 rtx target = assign_temp (part_type, 0, 1, 1);
5106 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5107 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5108 parts = tree_cons (to_be_saved,
5109 build (RTL_EXPR, part_type, NULL_TREE,
5110 (tree) target),
5111 parts);
5112 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5113 }
5114 return parts;
5115 }
5116
5117 /* Subroutine of expand_expr:
5118 record the non-copied parts (LIST) of an expr (LHS), and return a list
5119 which specifies the initial values of these parts. */
5120
5121 static tree
5122 init_noncopied_parts (lhs, list)
5123 tree lhs;
5124 tree list;
5125 {
5126 tree tail;
5127 tree parts = 0;
5128
5129 for (tail = list; tail; tail = TREE_CHAIN (tail))
5130 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5131 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5132 else if (TREE_PURPOSE (tail))
5133 {
5134 tree part = TREE_VALUE (tail);
5135 tree part_type = TREE_TYPE (part);
5136 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5137 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5138 }
5139 return parts;
5140 }
5141
5142 /* Subroutine of expand_expr: return nonzero iff there is no way that
5143 EXP can reference X, which is being modified. TOP_P is nonzero if this
5144 call is going to be used to determine whether we need a temporary
5145 for EXP, as opposed to a recursive call to this function.
5146
5147 It is always safe for this routine to return zero since it merely
5148 searches for optimization opportunities. */
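/* For example (hypothetical source), when expanding

	register int a, b, c;
	a = b + c;

   safe_from_p (DECL_RTL (a), <the PLUS_EXPR>, 1) returns nonzero,
   because neither operand's rtx is a's rtx, so the sum may be computed
   directly into a's register.  If instead the source were `*p + c' and
   `a' lived in memory, the INDIRECT_REF case below makes us answer 0
   conservatively.  */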
5149
5150 static int
5151 safe_from_p (x, exp, top_p)
5152 rtx x;
5153 tree exp;
5154 int top_p;
5155 {
5156 rtx exp_rtl = 0;
5157 int i, nops;
5158 static int save_expr_count;
5159 static int save_expr_size = 0;
5160 static tree *save_expr_rewritten;
5161 static tree save_expr_trees[256];
5162
5163 if (x == 0
5164 /* If EXP has varying size, we MUST use a target since we currently
5165 have no way of allocating temporaries of variable size
5166 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5167 So we assume here that something at a higher level has prevented a
5168 clash. This is somewhat bogus, but the best we can do. Only
5169 do this when X is BLKmode and when we are at the top level. */
5170 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5171 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5172 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5173 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5174 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5175 != INTEGER_CST)
5176 && GET_MODE (x) == BLKmode))
5177 return 1;
5178
5179 if (top_p && save_expr_size == 0)
5180 {
5181 int rtn;
5182
5183 save_expr_count = 0;
5184 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5185 save_expr_rewritten = &save_expr_trees[0];
5186
5187 rtn = safe_from_p (x, exp, 1);
5188
5189 for (i = 0; i < save_expr_count; ++i)
5190 {
5191 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5192 abort ();
5193 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5194 }
5195
5196 save_expr_size = 0;
5197
5198 return rtn;
5199 }
5200
5201 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5202 find the underlying pseudo. */
5203 if (GET_CODE (x) == SUBREG)
5204 {
5205 x = SUBREG_REG (x);
5206 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5207 return 0;
5208 }
5209
5210 /* If X is a location in the outgoing argument area, it is always safe. */
5211 if (GET_CODE (x) == MEM
5212 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5213 || (GET_CODE (XEXP (x, 0)) == PLUS
5214 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5215 return 1;
5216
5217 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5218 {
5219 case 'd':
5220 exp_rtl = DECL_RTL (exp);
5221 break;
5222
5223 case 'c':
5224 return 1;
5225
5226 case 'x':
5227 if (TREE_CODE (exp) == TREE_LIST)
5228 return ((TREE_VALUE (exp) == 0
5229 || safe_from_p (x, TREE_VALUE (exp), 0))
5230 && (TREE_CHAIN (exp) == 0
5231 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5232 else if (TREE_CODE (exp) == ERROR_MARK)
5233 return 1; /* An already-visited SAVE_EXPR? */
5234 else
5235 return 0;
5236
5237 case '1':
5238 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5239
5240 case '2':
5241 case '<':
5242 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5243 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5244
5245 case 'e':
5246 case 'r':
5247 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5248 the expression. If it is set, we conflict iff we are that rtx or
5249 both are in memory. Otherwise, we check all operands of the
5250 expression recursively. */
5251
5252 switch (TREE_CODE (exp))
5253 {
5254 case ADDR_EXPR:
5255 return (staticp (TREE_OPERAND (exp, 0))
5256 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5257 || TREE_STATIC (exp));
5258
5259 case INDIRECT_REF:
5260 if (GET_CODE (x) == MEM)
5261 return 0;
5262 break;
5263
5264 case CALL_EXPR:
5265 exp_rtl = CALL_EXPR_RTL (exp);
5266 if (exp_rtl == 0)
5267 {
5268 /* Assume that the call will clobber all hard registers and
5269 all of memory. */
5270 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5271 || GET_CODE (x) == MEM)
5272 return 0;
5273 }
5274
5275 break;
5276
5277 case RTL_EXPR:
5278 /* If a sequence exists, we would have to scan every instruction
5279 in the sequence to see if it was safe. This is probably not
5280 worthwhile. */
5281 if (RTL_EXPR_SEQUENCE (exp))
5282 return 0;
5283
5284 exp_rtl = RTL_EXPR_RTL (exp);
5285 break;
5286
5287 case WITH_CLEANUP_EXPR:
5288 exp_rtl = RTL_EXPR_RTL (exp);
5289 break;
5290
5291 case CLEANUP_POINT_EXPR:
5292 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5293
5294 case SAVE_EXPR:
5295 exp_rtl = SAVE_EXPR_RTL (exp);
5296 if (exp_rtl)
5297 break;
5298
5299 /* This SAVE_EXPR might appear many times in the top-level
5300 safe_from_p() expression, and if it has a complex
5301 subexpression, examining it multiple times could result
5302 in a combinatorial explosion. E.g. on an Alpha
5303 running at least 200MHz, a Fortran test case compiled with
5304 optimization took about 28 minutes to compile -- even though
5305 it was only a few lines long, and the complicated line causing
5306 so much time to be spent in the earlier version of safe_from_p()
5307 had only 293 or so unique nodes.
5308
5309 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5310 where it is so we can turn it back in the top-level safe_from_p()
5311 when we're done. */
5312
5313 /* For now, don't bother re-sizing the array. */
5314 if (save_expr_count >= save_expr_size)
5315 return 0;
5316 save_expr_rewritten[save_expr_count++] = exp;
5317
5318 nops = tree_code_length[(int) SAVE_EXPR];
5319 for (i = 0; i < nops; i++)
5320 {
5321 tree operand = TREE_OPERAND (exp, i);
5322 if (operand == NULL_TREE)
5323 continue;
5324 TREE_SET_CODE (exp, ERROR_MARK);
5325 if (!safe_from_p (x, operand, 0))
5326 return 0;
5327 TREE_SET_CODE (exp, SAVE_EXPR);
5328 }
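	  /* Leave EXP as an ERROR_MARK so that later occurrences in this
	     scan are treated as already visited; the top-level call will
	     turn it back into a SAVE_EXPR (see save_expr_rewritten above).  */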
5329 TREE_SET_CODE (exp, ERROR_MARK);
5330 return 1;
5331
5332 case BIND_EXPR:
5333 /* The only operand we look at is operand 1. The rest aren't
5334 part of the expression. */
5335 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5336
5337 case METHOD_CALL_EXPR:
5338 	/* This takes an rtx argument, but shouldn't appear here.  */
5339 abort ();
5340
5341 default:
5342 break;
5343 }
5344
5345 /* If we have an rtx, we do not need to scan our operands. */
5346 if (exp_rtl)
5347 break;
5348
5349 nops = tree_code_length[(int) TREE_CODE (exp)];
5350 for (i = 0; i < nops; i++)
5351 if (TREE_OPERAND (exp, i) != 0
5352 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5353 return 0;
5354 }
5355
5356 /* If we have an rtl, find any enclosed object. Then see if we conflict
5357 with it. */
5358 if (exp_rtl)
5359 {
5360 if (GET_CODE (exp_rtl) == SUBREG)
5361 {
5362 exp_rtl = SUBREG_REG (exp_rtl);
5363 if (GET_CODE (exp_rtl) == REG
5364 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5365 return 0;
5366 }
5367
5368 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5369 are memory and EXP is not readonly. */
5370 return ! (rtx_equal_p (x, exp_rtl)
5371 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5372 && ! TREE_READONLY (exp)));
5373 }
5374
5375 /* If we reach here, it is safe. */
5376 return 1;
5377 }
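
/* Illustrative use of safe_from_p (a sketch only, mirroring the idiom in
   the PLUS_EXPR case of expand_expr below): before reusing SUBTARGET as
   the target for operand 0, check that expanding operand 1 cannot clobber
   it; if it might, drop the suggested target and let expand_expr choose a
   fresh one.

       if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	 subtarget = 0;  */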
5378
5379 /* Subroutine of expand_expr: return nonzero iff EXP is an
5380 expression whose type is statically determinable. */
5381
5382 static int
5383 fixed_type_p (exp)
5384 tree exp;
5385 {
5386 if (TREE_CODE (exp) == PARM_DECL
5387 || TREE_CODE (exp) == VAR_DECL
5388 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5389 || TREE_CODE (exp) == COMPONENT_REF
5390 || TREE_CODE (exp) == ARRAY_REF)
5391 return 1;
5392 return 0;
5393 }
5394
5395 /* Subroutine of expand_expr: return rtx if EXP is a
5396 variable or parameter; else return 0. */
5397
5398 static rtx
5399 var_rtx (exp)
5400 tree exp;
5401 {
5402 STRIP_NOPS (exp);
5403 switch (TREE_CODE (exp))
5404 {
5405 case PARM_DECL:
5406 case VAR_DECL:
5407 return DECL_RTL (exp);
5408 default:
5409 return 0;
5410 }
5411 }
5412
5413 #ifdef MAX_INTEGER_COMPUTATION_MODE
5414 void
5415 check_max_integer_computation_mode (exp)
5416 tree exp;
5417 {
5418 enum tree_code code;
5419 enum machine_mode mode;
5420
5421 /* Strip any NOPs that don't change the mode. */
5422 STRIP_NOPS (exp);
5423 code = TREE_CODE (exp);
5424
5425 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5426 if (code == NOP_EXPR
5427 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5428 return;
5429
5430 /* First check the type of the overall operation. We need only look at
5431 unary, binary and relational operations. */
5432 if (TREE_CODE_CLASS (code) == '1'
5433 || TREE_CODE_CLASS (code) == '2'
5434 || TREE_CODE_CLASS (code) == '<')
5435 {
5436 mode = TYPE_MODE (TREE_TYPE (exp));
5437 if (GET_MODE_CLASS (mode) == MODE_INT
5438 && mode > MAX_INTEGER_COMPUTATION_MODE)
5439 fatal ("unsupported wide integer operation");
5440 }
5441
5442 /* Check operand of a unary op. */
5443 if (TREE_CODE_CLASS (code) == '1')
5444 {
5445 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5446 if (GET_MODE_CLASS (mode) == MODE_INT
5447 && mode > MAX_INTEGER_COMPUTATION_MODE)
5448 fatal ("unsupported wide integer operation");
5449 }
5450
5451 /* Check operands of a binary/comparison op. */
5452 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5453 {
5454 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5455 if (GET_MODE_CLASS (mode) == MODE_INT
5456 && mode > MAX_INTEGER_COMPUTATION_MODE)
5457 fatal ("unsupported wide integer operation");
5458
5459 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5460 if (GET_MODE_CLASS (mode) == MODE_INT
5461 && mode > MAX_INTEGER_COMPUTATION_MODE)
5462 fatal ("unsupported wide integer operation");
5463 }
5464 }
5465 #endif
5466
5467 \f
5468 /* expand_expr: generate code for computing expression EXP.
5469 An rtx for the computed value is returned. The value is never null.
5470 In the case of a void EXP, const0_rtx is returned.
5471
5472 The value may be stored in TARGET if TARGET is nonzero.
5473 TARGET is just a suggestion; callers must assume that
5474 the rtx returned may not be the same as TARGET.
5475
5476 If TARGET is CONST0_RTX, it means that the value will be ignored.
5477
5478 If TMODE is not VOIDmode, it suggests generating the
5479 result in mode TMODE. But this is done only when convenient.
5480    Otherwise, TMODE is ignored and the value is generated in its natural mode.
5481 TMODE is just a suggestion; callers must assume that
5482 the rtx returned may not have mode TMODE.
5483
5484 Note that TARGET may have neither TMODE nor MODE. In that case, it
5485 probably will not be used.
5486
5487 If MODIFIER is EXPAND_SUM then when EXP is an addition
5488 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5489 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5490 products as above, or REG or MEM, or constant.
5491 Ordinarily in such cases we would output mul or add instructions
5492 and then return a pseudo reg containing the sum.
5493
5494 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5495 it also marks a label as absolutely required (it can't be dead).
5496 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5497 This is used for outputting expressions used in initializers.
5498
5499 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5500 with a constant address even if that address is not normally legitimate.
5501 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
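
/* A minimal illustrative call (a sketch, not the only valid form): most
   callers pass no particular target or mode and let expand_expr decide,
   e.g.

       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   A caller interested only in side effects passes const0_rtx as TARGET,
   which triggers the `ignore' handling below.  */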
5502
5503 rtx
5504 expand_expr (exp, target, tmode, modifier)
5505 register tree exp;
5506 rtx target;
5507 enum machine_mode tmode;
5508 enum expand_modifier modifier;
5509 {
5510 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5511 This is static so it will be accessible to our recursive callees. */
5512 static tree placeholder_list = 0;
5513 register rtx op0, op1, temp;
5514 tree type = TREE_TYPE (exp);
5515 int unsignedp = TREE_UNSIGNED (type);
5516 register enum machine_mode mode;
5517 register enum tree_code code = TREE_CODE (exp);
5518 optab this_optab;
5519 rtx subtarget, original_target;
5520 int ignore;
5521 tree context;
5522 /* Used by check-memory-usage to make modifier read only. */
5523 enum expand_modifier ro_modifier;
5524
5525 /* Handle ERROR_MARK before anybody tries to access its type. */
5526 if (TREE_CODE (exp) == ERROR_MARK)
5527 {
5528 op0 = CONST0_RTX (tmode);
5529 if (op0 != 0)
5530 return op0;
5531 return const0_rtx;
5532 }
5533
5534 mode = TYPE_MODE (type);
5535 /* Use subtarget as the target for operand 0 of a binary operation. */
5536 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5537 original_target = target;
5538 ignore = (target == const0_rtx
5539 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5540 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5541 || code == COND_EXPR)
5542 && TREE_CODE (type) == VOID_TYPE));
5543
5544 /* Make a read-only version of the modifier. */
5545 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5546 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5547 ro_modifier = modifier;
5548 else
5549 ro_modifier = EXPAND_NORMAL;
5550
5551 /* Don't use hard regs as subtargets, because the combiner
5552 can only handle pseudo regs. */
5553 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5554 subtarget = 0;
5555 /* Avoid subtargets inside loops,
5556 since they hide some invariant expressions. */
5557 if (preserve_subexpressions_p ())
5558 subtarget = 0;
5559
5560 /* If we are going to ignore this result, we need only do something
5561 if there is a side-effect somewhere in the expression. If there
5562 is, short-circuit the most common cases here. Note that we must
5563 not call expand_expr with anything but const0_rtx in case this
5564 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5565
5566 if (ignore)
5567 {
5568 if (! TREE_SIDE_EFFECTS (exp))
5569 return const0_rtx;
5570
5571 /* Ensure we reference a volatile object even if value is ignored. */
5572 if (TREE_THIS_VOLATILE (exp)
5573 && TREE_CODE (exp) != FUNCTION_DECL
5574 && mode != VOIDmode && mode != BLKmode)
5575 {
5576 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5577 if (GET_CODE (temp) == MEM)
5578 temp = copy_to_reg (temp);
5579 return const0_rtx;
5580 }
5581
5582 if (TREE_CODE_CLASS (code) == '1')
5583 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5584 VOIDmode, ro_modifier);
5585 else if (TREE_CODE_CLASS (code) == '2'
5586 || TREE_CODE_CLASS (code) == '<')
5587 {
5588 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5589 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5590 return const0_rtx;
5591 }
5592 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5593 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5594 /* If the second operand has no side effects, just evaluate
5595 the first. */
5596 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5597 VOIDmode, ro_modifier);
5598
5599 target = 0;
5600 }
5601
5602 #ifdef MAX_INTEGER_COMPUTATION_MODE
5603 /* Only check stuff here if the mode we want is different from the mode
5604    of the expression; if it's the same, check_max_integer_computation_mode
5605 will handle it. Do we really need to check this stuff at all? */
5606
5607 if (target
5608 && GET_MODE (target) != mode
5609 && TREE_CODE (exp) != INTEGER_CST
5610 && TREE_CODE (exp) != PARM_DECL
5611 && TREE_CODE (exp) != ARRAY_REF
5612 && TREE_CODE (exp) != COMPONENT_REF
5613 && TREE_CODE (exp) != BIT_FIELD_REF
5614 && TREE_CODE (exp) != INDIRECT_REF
5615 && TREE_CODE (exp) != CALL_EXPR
5616 && TREE_CODE (exp) != VAR_DECL
5617 && TREE_CODE (exp) != RTL_EXPR)
5618 {
5619 enum machine_mode mode = GET_MODE (target);
5620
5621 if (GET_MODE_CLASS (mode) == MODE_INT
5622 && mode > MAX_INTEGER_COMPUTATION_MODE)
5623 fatal ("unsupported wide integer operation");
5624 }
5625
5626 if (tmode != mode
5627 && TREE_CODE (exp) != INTEGER_CST
5628 && TREE_CODE (exp) != PARM_DECL
5629 && TREE_CODE (exp) != ARRAY_REF
5630 && TREE_CODE (exp) != COMPONENT_REF
5631 && TREE_CODE (exp) != BIT_FIELD_REF
5632 && TREE_CODE (exp) != INDIRECT_REF
5633 && TREE_CODE (exp) != VAR_DECL
5634 && TREE_CODE (exp) != CALL_EXPR
5635 && TREE_CODE (exp) != RTL_EXPR
5636 && GET_MODE_CLASS (tmode) == MODE_INT
5637 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5638 fatal ("unsupported wide integer operation");
5639
5640 check_max_integer_computation_mode (exp);
5641 #endif
5642
5643 /* If will do cse, generate all results into pseudo registers
5644 since 1) that allows cse to find more things
5645 and 2) otherwise cse could produce an insn the machine
5646 cannot support. */
5647
5648 if (! cse_not_expected && mode != BLKmode && target
5649 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5650 target = subtarget;
5651
5652 switch (code)
5653 {
5654 case LABEL_DECL:
5655 {
5656 tree function = decl_function_context (exp);
5657 /* Handle using a label in a containing function. */
5658 if (function != current_function_decl
5659 && function != inline_function_decl && function != 0)
5660 {
5661 struct function *p = find_function_data (function);
5662 /* Allocate in the memory associated with the function
5663 that the label is in. */
5664 push_obstacks (p->function_obstack,
5665 p->function_maybepermanent_obstack);
5666
5667 p->expr->x_forced_labels
5668 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5669 p->expr->x_forced_labels);
5670 pop_obstacks ();
5671 }
5672 else
5673 {
5674 if (modifier == EXPAND_INITIALIZER)
5675 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5676 label_rtx (exp),
5677 forced_labels);
5678 }
5679
5680 temp = gen_rtx_MEM (FUNCTION_MODE,
5681 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5682 if (function != current_function_decl
5683 && function != inline_function_decl && function != 0)
5684 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5685 return temp;
5686 }
5687
5688 case PARM_DECL:
5689 if (DECL_RTL (exp) == 0)
5690 {
5691 error_with_decl (exp, "prior parameter's size depends on `%s'");
5692 return CONST0_RTX (mode);
5693 }
5694
5695 /* ... fall through ... */
5696
5697 case VAR_DECL:
5698 /* If a static var's type was incomplete when the decl was written,
5699 but the type is complete now, lay out the decl now. */
5700 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5701 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5702 {
5703 push_obstacks_nochange ();
5704 end_temporary_allocation ();
5705 layout_decl (exp, 0);
5706 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5707 pop_obstacks ();
5708 }
5709
5710 /* Although static-storage variables start off initialized, according to
5711 ANSI C, a memcpy could overwrite them with uninitialized values. So
5712 we check them too. This also lets us check for read-only variables
5713 accessed via a non-const declaration, in case it won't be detected
5714 any other way (e.g., in an embedded system or OS kernel without
5715 memory protection).
5716
5717 Aggregates are not checked here; they're handled elsewhere. */
5718 if (current_function && current_function_check_memory_usage
5719 && code == VAR_DECL
5720 && GET_CODE (DECL_RTL (exp)) == MEM
5721 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5722 {
5723 enum memory_use_mode memory_usage;
5724 memory_usage = get_memory_usage_from_modifier (modifier);
5725
5726 if (memory_usage != MEMORY_USE_DONT)
5727 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5728 XEXP (DECL_RTL (exp), 0), Pmode,
5729 GEN_INT (int_size_in_bytes (type)),
5730 TYPE_MODE (sizetype),
5731 GEN_INT (memory_usage),
5732 TYPE_MODE (integer_type_node));
5733 }
5734
5735 /* ... fall through ... */
5736
5737 case FUNCTION_DECL:
5738 case RESULT_DECL:
5739 if (DECL_RTL (exp) == 0)
5740 abort ();
5741
5742       /* Ensure the variable is marked as used even if it doesn't go through
5743 	 a parser.  If it hasn't been used yet, write out an external
5744 	 definition.  */
5745 if (! TREE_USED (exp))
5746 {
5747 assemble_external (exp);
5748 TREE_USED (exp) = 1;
5749 }
5750
5751 /* Show we haven't gotten RTL for this yet. */
5752 temp = 0;
5753
5754 /* Handle variables inherited from containing functions. */
5755 context = decl_function_context (exp);
5756
5757 /* We treat inline_function_decl as an alias for the current function
5758 because that is the inline function whose vars, types, etc.
5759 are being merged into the current function.
5760 See expand_inline_function. */
5761
5762 if (context != 0 && context != current_function_decl
5763 && context != inline_function_decl
5764 /* If var is static, we don't need a static chain to access it. */
5765 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5766 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5767 {
5768 rtx addr;
5769
5770 /* Mark as non-local and addressable. */
5771 DECL_NONLOCAL (exp) = 1;
5772 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5773 abort ();
5774 mark_addressable (exp);
5775 if (GET_CODE (DECL_RTL (exp)) != MEM)
5776 abort ();
5777 addr = XEXP (DECL_RTL (exp), 0);
5778 if (GET_CODE (addr) == MEM)
5779 addr = gen_rtx_MEM (Pmode,
5780 fix_lexical_addr (XEXP (addr, 0), exp));
5781 else
5782 addr = fix_lexical_addr (addr, exp);
5783 temp = change_address (DECL_RTL (exp), mode, addr);
5784 }
5785
5786 /* This is the case of an array whose size is to be determined
5787 from its initializer, while the initializer is still being parsed.
5788 See expand_decl. */
5789
5790 else if (GET_CODE (DECL_RTL (exp)) == MEM
5791 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5792 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5793 XEXP (DECL_RTL (exp), 0));
5794
5795       /* If DECL_RTL is memory, we are in the normal case: if either
5796 	 the address is not valid, or it is not a register and -fforce-addr
5797 	 is specified, get the address into a register.  */
5798
5799 else if (GET_CODE (DECL_RTL (exp)) == MEM
5800 && modifier != EXPAND_CONST_ADDRESS
5801 && modifier != EXPAND_SUM
5802 && modifier != EXPAND_INITIALIZER
5803 && (! memory_address_p (DECL_MODE (exp),
5804 XEXP (DECL_RTL (exp), 0))
5805 || (flag_force_addr
5806 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5807 temp = change_address (DECL_RTL (exp), VOIDmode,
5808 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5809
5810       /* If we got something, return it.  But first, set the alignment
5811 	 if the address is a register.  */
5812 if (temp != 0)
5813 {
5814 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5815 mark_reg_pointer (XEXP (temp, 0),
5816 DECL_ALIGN (exp) / BITS_PER_UNIT);
5817
5818 return temp;
5819 }
5820
5821 /* If the mode of DECL_RTL does not match that of the decl, it
5822 must be a promoted value. We return a SUBREG of the wanted mode,
5823 but mark it so that we know that it was already extended. */
5824
5825 if (GET_CODE (DECL_RTL (exp)) == REG
5826 && GET_MODE (DECL_RTL (exp)) != mode)
5827 {
5828 /* Get the signedness used for this variable. Ensure we get the
5829 same mode we got when the variable was declared. */
5830 if (GET_MODE (DECL_RTL (exp))
5831 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5832 abort ();
5833
5834 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5835 SUBREG_PROMOTED_VAR_P (temp) = 1;
5836 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5837 return temp;
5838 }
5839
5840 return DECL_RTL (exp);
5841
5842 case INTEGER_CST:
5843 return immed_double_const (TREE_INT_CST_LOW (exp),
5844 TREE_INT_CST_HIGH (exp),
5845 mode);
5846
5847 case CONST_DECL:
5848 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5849 EXPAND_MEMORY_USE_BAD);
5850
5851 case REAL_CST:
5852 /* If optimized, generate immediate CONST_DOUBLE
5853 which will be turned into memory by reload if necessary.
5854
5855 We used to force a register so that loop.c could see it. But
5856 this does not allow gen_* patterns to perform optimizations with
5857 the constants. It also produces two insns in cases like "x = 1.0;".
5858 On most machines, floating-point constants are not permitted in
5859 many insns, so we'd end up copying it to a register in any case.
5860
5861 Now, we do the copying in expand_binop, if appropriate. */
5862 return immed_real_const (exp);
5863
5864 case COMPLEX_CST:
5865 case STRING_CST:
5866 if (! TREE_CST_RTL (exp))
5867 output_constant_def (exp);
5868
5869 /* TREE_CST_RTL probably contains a constant address.
5870 On RISC machines where a constant address isn't valid,
5871 make some insns to get that address into a register. */
5872 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5873 && modifier != EXPAND_CONST_ADDRESS
5874 && modifier != EXPAND_INITIALIZER
5875 && modifier != EXPAND_SUM
5876 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5877 || (flag_force_addr
5878 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5879 return change_address (TREE_CST_RTL (exp), VOIDmode,
5880 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5881 return TREE_CST_RTL (exp);
5882
5883 case EXPR_WITH_FILE_LOCATION:
5884 {
5885 rtx to_return;
5886 char *saved_input_filename = input_filename;
5887 int saved_lineno = lineno;
5888 input_filename = EXPR_WFL_FILENAME (exp);
5889 lineno = EXPR_WFL_LINENO (exp);
5890 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5891 emit_line_note (input_filename, lineno);
5892 	/* Possibly avoid switching back and forth here.  */
5893 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5894 input_filename = saved_input_filename;
5895 lineno = saved_lineno;
5896 return to_return;
5897 }
5898
5899 case SAVE_EXPR:
5900 context = decl_function_context (exp);
5901
5902 /* If this SAVE_EXPR was at global context, assume we are an
5903 initialization function and move it into our context. */
5904 if (context == 0)
5905 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5906
5907 /* We treat inline_function_decl as an alias for the current function
5908 because that is the inline function whose vars, types, etc.
5909 are being merged into the current function.
5910 See expand_inline_function. */
5911 if (context == current_function_decl || context == inline_function_decl)
5912 context = 0;
5913
5914 /* If this is non-local, handle it. */
5915 if (context)
5916 {
5917 /* The following call just exists to abort if the context is
5918 not of a containing function. */
5919 find_function_data (context);
5920
5921 temp = SAVE_EXPR_RTL (exp);
5922 if (temp && GET_CODE (temp) == REG)
5923 {
5924 put_var_into_stack (exp);
5925 temp = SAVE_EXPR_RTL (exp);
5926 }
5927 if (temp == 0 || GET_CODE (temp) != MEM)
5928 abort ();
5929 return change_address (temp, mode,
5930 fix_lexical_addr (XEXP (temp, 0), exp));
5931 }
5932 if (SAVE_EXPR_RTL (exp) == 0)
5933 {
5934 if (mode == VOIDmode)
5935 temp = const0_rtx;
5936 else
5937 temp = assign_temp (type, 3, 0, 0);
5938
5939 SAVE_EXPR_RTL (exp) = temp;
5940 if (!optimize && GET_CODE (temp) == REG)
5941 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5942 save_expr_regs);
5943
5944 /* If the mode of TEMP does not match that of the expression, it
5945 must be a promoted value. We pass store_expr a SUBREG of the
5946 wanted mode but mark it so that we know that it was already
5947 extended. Note that `unsignedp' was modified above in
5948 this case. */
5949
5950 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5951 {
5952 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5953 SUBREG_PROMOTED_VAR_P (temp) = 1;
5954 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5955 }
5956
5957 if (temp == const0_rtx)
5958 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5959 EXPAND_MEMORY_USE_BAD);
5960 else
5961 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5962
5963 TREE_USED (exp) = 1;
5964 }
5965
5966 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5967 must be a promoted value. We return a SUBREG of the wanted mode,
5968 but mark it so that we know that it was already extended. */
5969
5970 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5971 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5972 {
5973 /* Compute the signedness and make the proper SUBREG. */
5974 promote_mode (type, mode, &unsignedp, 0);
5975 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5976 SUBREG_PROMOTED_VAR_P (temp) = 1;
5977 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5978 return temp;
5979 }
5980
5981 return SAVE_EXPR_RTL (exp);
5982
5983 case UNSAVE_EXPR:
5984 {
5985 rtx temp;
5986 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5987 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5988 return temp;
5989 }
5990
5991 case PLACEHOLDER_EXPR:
5992 {
5993 tree placeholder_expr;
5994
5995 	/* If there is an object on the head of the placeholder list,
5996 	   see if some object in it is of type TYPE or a pointer to it.  For
5997 	   further information, see tree.def.  */
5998 for (placeholder_expr = placeholder_list;
5999 placeholder_expr != 0;
6000 placeholder_expr = TREE_CHAIN (placeholder_expr))
6001 {
6002 tree need_type = TYPE_MAIN_VARIANT (type);
6003 tree object = 0;
6004 tree old_list = placeholder_list;
6005 tree elt;
6006
6007 /* Find the outermost reference that is of the type we want.
6008 If none, see if any object has a type that is a pointer to
6009 the type we want. */
6010 for (elt = TREE_PURPOSE (placeholder_expr);
6011 elt != 0 && object == 0;
6012 elt
6013 = ((TREE_CODE (elt) == COMPOUND_EXPR
6014 || TREE_CODE (elt) == COND_EXPR)
6015 ? TREE_OPERAND (elt, 1)
6016 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6017 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6018 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6019 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6020 ? TREE_OPERAND (elt, 0) : 0))
6021 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6022 object = elt;
6023
6024 for (elt = TREE_PURPOSE (placeholder_expr);
6025 elt != 0 && object == 0;
6026 elt
6027 = ((TREE_CODE (elt) == COMPOUND_EXPR
6028 || TREE_CODE (elt) == COND_EXPR)
6029 ? TREE_OPERAND (elt, 1)
6030 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6031 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6032 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6033 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6034 ? TREE_OPERAND (elt, 0) : 0))
6035 if (POINTER_TYPE_P (TREE_TYPE (elt))
6036 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6037 == need_type))
6038 object = build1 (INDIRECT_REF, need_type, elt);
6039
6040 if (object != 0)
6041 {
6042 /* Expand this object skipping the list entries before
6043 it was found in case it is also a PLACEHOLDER_EXPR.
6044 In that case, we want to translate it using subsequent
6045 entries. */
6046 placeholder_list = TREE_CHAIN (placeholder_expr);
6047 temp = expand_expr (object, original_target, tmode,
6048 ro_modifier);
6049 placeholder_list = old_list;
6050 return temp;
6051 }
6052 }
6053 }
6054
6055 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6056 abort ();
6057
6058 case WITH_RECORD_EXPR:
6059 /* Put the object on the placeholder list, expand our first operand,
6060 and pop the list. */
6061 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6062 placeholder_list);
6063 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6064 tmode, ro_modifier);
6065 placeholder_list = TREE_CHAIN (placeholder_list);
6066 return target;
6067
6068 case GOTO_EXPR:
6069 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6070 expand_goto (TREE_OPERAND (exp, 0));
6071 else
6072 expand_computed_goto (TREE_OPERAND (exp, 0));
6073 return const0_rtx;
6074
6075 case EXIT_EXPR:
6076 expand_exit_loop_if_false (NULL_PTR,
6077 invert_truthvalue (TREE_OPERAND (exp, 0)));
6078 return const0_rtx;
6079
6080 case LABELED_BLOCK_EXPR:
6081 if (LABELED_BLOCK_BODY (exp))
6082 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6083 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6084 return const0_rtx;
6085
6086 case EXIT_BLOCK_EXPR:
6087 if (EXIT_BLOCK_RETURN (exp))
6088 sorry ("returned value in block_exit_expr");
6089 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6090 return const0_rtx;
6091
6092 case LOOP_EXPR:
6093 push_temp_slots ();
6094 expand_start_loop (1);
6095 expand_expr_stmt (TREE_OPERAND (exp, 0));
6096 expand_end_loop ();
6097 pop_temp_slots ();
6098
6099 return const0_rtx;
6100
6101 case BIND_EXPR:
6102 {
6103 tree vars = TREE_OPERAND (exp, 0);
6104 int vars_need_expansion = 0;
6105
6106 /* Need to open a binding contour here because
6107 if there are any cleanups they must be contained here. */
6108 expand_start_bindings (2);
6109
6110 /* Mark the corresponding BLOCK for output in its proper place. */
6111 if (TREE_OPERAND (exp, 2) != 0
6112 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6113 insert_block (TREE_OPERAND (exp, 2));
6114
6115 /* If VARS have not yet been expanded, expand them now. */
6116 while (vars)
6117 {
6118 if (DECL_RTL (vars) == 0)
6119 {
6120 vars_need_expansion = 1;
6121 expand_decl (vars);
6122 }
6123 expand_decl_init (vars);
6124 vars = TREE_CHAIN (vars);
6125 }
6126
6127 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6128
6129 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6130
6131 return temp;
6132 }
6133
6134 case RTL_EXPR:
6135 if (RTL_EXPR_SEQUENCE (exp))
6136 {
6137 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6138 abort ();
6139 emit_insns (RTL_EXPR_SEQUENCE (exp));
6140 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6141 }
6142 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6143 free_temps_for_rtl_expr (exp);
6144 return RTL_EXPR_RTL (exp);
6145
6146 case CONSTRUCTOR:
6147 /* If we don't need the result, just ensure we evaluate any
6148 subexpressions. */
6149 if (ignore)
6150 {
6151 tree elt;
6152 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6153 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6154 EXPAND_MEMORY_USE_BAD);
6155 return const0_rtx;
6156 }
6157
6158 /* All elts simple constants => refer to a constant in memory. But
6159 if this is a non-BLKmode mode, let it store a field at a time
6160 since that should make a CONST_INT or CONST_DOUBLE when we
6161 fold. Likewise, if we have a target we can use, it is best to
6162 store directly into the target unless the type is large enough
6163 that memcpy will be used. If we are making an initializer and
6164 all operands are constant, put it in memory as well. */
6165 else if ((TREE_STATIC (exp)
6166 && ((mode == BLKmode
6167 && ! (target != 0 && safe_from_p (target, exp, 1)))
6168 || TREE_ADDRESSABLE (exp)
6169 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6170 && (!MOVE_BY_PIECES_P
6171 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6172 TYPE_ALIGN (type) / BITS_PER_UNIT))
6173 && ! mostly_zeros_p (exp))))
6174 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6175 {
6176 rtx constructor = output_constant_def (exp);
6177 if (modifier != EXPAND_CONST_ADDRESS
6178 && modifier != EXPAND_INITIALIZER
6179 && modifier != EXPAND_SUM
6180 && (! memory_address_p (GET_MODE (constructor),
6181 XEXP (constructor, 0))
6182 || (flag_force_addr
6183 && GET_CODE (XEXP (constructor, 0)) != REG)))
6184 constructor = change_address (constructor, VOIDmode,
6185 XEXP (constructor, 0));
6186 return constructor;
6187 }
6188
6189 else
6190 {
6191 /* Handle calls that pass values in multiple non-contiguous
6192 locations. The Irix 6 ABI has examples of this. */
6193 if (target == 0 || ! safe_from_p (target, exp, 1)
6194 || GET_CODE (target) == PARALLEL)
6195 {
6196 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6197 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6198 else
6199 target = assign_temp (type, 0, 1, 1);
6200 }
6201
6202 if (TREE_READONLY (exp))
6203 {
6204 if (GET_CODE (target) == MEM)
6205 target = copy_rtx (target);
6206
6207 RTX_UNCHANGING_P (target) = 1;
6208 }
6209
6210 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0);
6211 return target;
6212 }
6213
6214 case INDIRECT_REF:
6215 {
6216 tree exp1 = TREE_OPERAND (exp, 0);
6217 tree exp2;
6218 tree index;
6219 tree string = string_constant (exp1, &index);
6220 int i;
6221
6222 /* Try to optimize reads from const strings. */
6223 if (string
6224 && TREE_CODE (string) == STRING_CST
6225 && TREE_CODE (index) == INTEGER_CST
6226 && !TREE_INT_CST_HIGH (index)
6227 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6228 && GET_MODE_CLASS (mode) == MODE_INT
6229 && GET_MODE_SIZE (mode) == 1
6230 && modifier != EXPAND_MEMORY_USE_WO)
6231 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6232
6233 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6234 op0 = memory_address (mode, op0);
6235
6236 if (current_function && current_function_check_memory_usage
6237 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6238 {
6239 enum memory_use_mode memory_usage;
6240 memory_usage = get_memory_usage_from_modifier (modifier);
6241
6242 if (memory_usage != MEMORY_USE_DONT)
6243 {
6244 in_check_memory_usage = 1;
6245 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6246 op0, Pmode,
6247 GEN_INT (int_size_in_bytes (type)),
6248 TYPE_MODE (sizetype),
6249 GEN_INT (memory_usage),
6250 TYPE_MODE (integer_type_node));
6251 in_check_memory_usage = 0;
6252 }
6253 }
6254
6255 temp = gen_rtx_MEM (mode, op0);
6256 /* If address was computed by addition,
6257 mark this as an element of an aggregate. */
6258 if (TREE_CODE (exp1) == PLUS_EXPR
6259 || (TREE_CODE (exp1) == SAVE_EXPR
6260 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6261 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6262 || (TREE_CODE (exp1) == ADDR_EXPR
6263 && (exp2 = TREE_OPERAND (exp1, 0))
6264 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6265 MEM_SET_IN_STRUCT_P (temp, 1);
6266
6267 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6268 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6269
6270 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6271 here, because, in C and C++, the fact that a location is accessed
6272 through a pointer to const does not mean that the value there can
6273 never change. Languages where it can never change should
6274 also set TREE_STATIC. */
6275 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6276 return temp;
6277 }
6278
6279 case ARRAY_REF:
6280 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6281 abort ();
6282
6283 {
6284 tree array = TREE_OPERAND (exp, 0);
6285 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6286 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6287 tree index = TREE_OPERAND (exp, 1);
6288 tree index_type = TREE_TYPE (index);
6289 HOST_WIDE_INT i;
6290
6291 /* Optimize the special-case of a zero lower bound.
6292
6293 We convert the low_bound to sizetype to avoid some problems
6294 with constant folding. (E.g. suppose the lower bound is 1,
6295 and its mode is QI. Without the conversion, (ARRAY
6296 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6297 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6298
6299 But sizetype isn't quite right either (especially if
6300 the lowbound is negative). FIXME */
6301
6302 if (! integer_zerop (low_bound))
6303 index = fold (build (MINUS_EXPR, index_type, index,
6304 convert (sizetype, low_bound)));
6305
6306 /* Fold an expression like: "foo"[2].
6307 This is not done in fold so it won't happen inside &.
6308 Don't fold if this is for wide characters since it's too
6309 difficult to do correctly and this is a very rare case. */
6310
6311 if (TREE_CODE (array) == STRING_CST
6312 && TREE_CODE (index) == INTEGER_CST
6313 && !TREE_INT_CST_HIGH (index)
6314 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6315 && GET_MODE_CLASS (mode) == MODE_INT
6316 && GET_MODE_SIZE (mode) == 1)
6317 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6318
6319 /* If this is a constant index into a constant array,
6320 just get the value from the array. Handle both the cases when
6321 we have an explicit constructor and when our operand is a variable
6322 that was declared const. */
6323
6324 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6325 {
6326 if (TREE_CODE (index) == INTEGER_CST
6327 && TREE_INT_CST_HIGH (index) == 0)
6328 {
6329 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6330
6331 i = TREE_INT_CST_LOW (index);
6332 while (elem && i--)
6333 elem = TREE_CHAIN (elem);
6334 if (elem)
6335 return expand_expr (fold (TREE_VALUE (elem)), target,
6336 tmode, ro_modifier);
6337 }
6338 }
6339
6340 else if (optimize >= 1
6341 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6342 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6343 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6344 {
6345 if (TREE_CODE (index) == INTEGER_CST)
6346 {
6347 tree init = DECL_INITIAL (array);
6348
6349 i = TREE_INT_CST_LOW (index);
6350 if (TREE_CODE (init) == CONSTRUCTOR)
6351 {
6352 tree elem = CONSTRUCTOR_ELTS (init);
6353
6354 while (elem
6355 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6356 elem = TREE_CHAIN (elem);
6357 if (elem)
6358 return expand_expr (fold (TREE_VALUE (elem)), target,
6359 tmode, ro_modifier);
6360 }
6361 else if (TREE_CODE (init) == STRING_CST
6362 && TREE_INT_CST_HIGH (index) == 0
6363 && (TREE_INT_CST_LOW (index)
6364 < TREE_STRING_LENGTH (init)))
6365 return (GEN_INT
6366 (TREE_STRING_POINTER
6367 (init)[TREE_INT_CST_LOW (index)]));
6368 }
6369 }
6370 }
6371
6372 /* ... fall through ... */
6373
6374 case COMPONENT_REF:
6375 case BIT_FIELD_REF:
6376 /* If the operand is a CONSTRUCTOR, we can just extract the
6377 appropriate field if it is present. Don't do this if we have
6378 already written the data since we want to refer to that copy
6379 and varasm.c assumes that's what we'll do. */
6380 if (code != ARRAY_REF
6381 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6382 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6383 {
6384 tree elt;
6385
6386 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6387 elt = TREE_CHAIN (elt))
6388 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6389 /* We can normally use the value of the field in the
6390 CONSTRUCTOR. However, if this is a bitfield in
6391 an integral mode that we can fit in a HOST_WIDE_INT,
6392 we must mask only the number of bits in the bitfield,
6393 since this is done implicitly by the constructor. If
6394 the bitfield does not meet either of those conditions,
6395 we can't do this optimization. */
6396 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6397 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6398 == MODE_INT)
6399 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6400 <= HOST_BITS_PER_WIDE_INT))))
6401 {
6402 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6403 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6404 {
6405 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6406
6407 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6408 {
6409 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6410 op0 = expand_and (op0, op1, target);
6411 }
6412 else
6413 {
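		      /* Sign-extend the BITSIZE-bit field: shift it up to
			 the top of IMODE, then shift it back down
			 arithmetically.  */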
6414 enum machine_mode imode
6415 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6416 tree count
6417 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6418 0);
6419
6420 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6421 target, 0);
6422 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6423 target, 0);
6424 }
6425 }
6426
6427 return op0;
6428 }
6429 }
6430
6431 {
6432 enum machine_mode mode1;
6433 int bitsize;
6434 int bitpos;
6435 tree offset;
6436 int volatilep = 0;
6437 int alignment;
6438 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6439 &mode1, &unsignedp, &volatilep,
6440 &alignment);
6441
6442 /* If we got back the original object, something is wrong. Perhaps
6443 we are evaluating an expression too early. In any event, don't
6444 infinitely recurse. */
6445 if (tem == exp)
6446 abort ();
6447
6448 	/* If TEM's type is a union of variable size, pass TARGET to the inner
6449 	   computation, since it will need a temporary and TARGET is known
6450 	   to suffice.  This occurs in unchecked conversion in Ada.  */
6451
6452 op0 = expand_expr (tem,
6453 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6454 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6455 != INTEGER_CST)
6456 ? target : NULL_RTX),
6457 VOIDmode,
6458 modifier == EXPAND_INITIALIZER
6459 ? modifier : EXPAND_NORMAL);
6460
6461 /* If this is a constant, put it into a register if it is a
6462 legitimate constant and memory if it isn't. */
6463 if (CONSTANT_P (op0))
6464 {
6465 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6466 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6467 op0 = force_reg (mode, op0);
6468 else
6469 op0 = validize_mem (force_const_mem (mode, op0));
6470 }
6471
6472 if (offset != 0)
6473 {
6474 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6475
6476 if (GET_CODE (op0) != MEM)
6477 abort ();
6478
6479 if (GET_MODE (offset_rtx) != ptr_mode)
6480 {
6481 #ifdef POINTERS_EXTEND_UNSIGNED
6482 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6483 #else
6484 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6485 #endif
6486 }
6487
6488 	    /* A constant address in OP0 can have VOIDmode; we must not try
6489 	       to call force_reg in that case, so avoid it.  */
6490 if (GET_CODE (op0) == MEM
6491 && GET_MODE (op0) == BLKmode
6492 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6493 && bitsize
6494 && (bitpos % bitsize) == 0
6495 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6496 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6497 {
6498 rtx temp = change_address (op0, mode1,
6499 plus_constant (XEXP (op0, 0),
6500 (bitpos /
6501 BITS_PER_UNIT)));
6502 if (GET_CODE (XEXP (temp, 0)) == REG)
6503 op0 = temp;
6504 else
6505 op0 = change_address (op0, mode1,
6506 force_reg (GET_MODE (XEXP (temp, 0)),
6507 XEXP (temp, 0)));
6508 bitpos = 0;
6509 }
6510
6511
6512 op0 = change_address (op0, VOIDmode,
6513 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6514 force_reg (ptr_mode,
6515 offset_rtx)));
6516 }
6517
6518 /* Don't forget about volatility even if this is a bitfield. */
6519 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6520 {
6521 op0 = copy_rtx (op0);
6522 MEM_VOLATILE_P (op0) = 1;
6523 }
6524
6525 /* Check the access. */
6526 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6527 {
6528 enum memory_use_mode memory_usage;
6529 memory_usage = get_memory_usage_from_modifier (modifier);
6530
6531 if (memory_usage != MEMORY_USE_DONT)
6532 {
6533 rtx to;
6534 int size;
6535
6536 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6537 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6538
6539 /* Check the access right of the pointer. */
6540 if (size > BITS_PER_UNIT)
6541 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6542 to, Pmode,
6543 GEN_INT (size / BITS_PER_UNIT),
6544 TYPE_MODE (sizetype),
6545 GEN_INT (memory_usage),
6546 TYPE_MODE (integer_type_node));
6547 }
6548 }
6549
6550 /* In cases where an aligned union has an unaligned object
6551 as a field, we might be extracting a BLKmode value from
6552 an integer-mode (e.g., SImode) object. Handle this case
6553 by doing the extract into an object as wide as the field
6554 (which we know to be the width of a basic mode), then
6555 storing into memory, and changing the mode to BLKmode.
6556 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6557 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6558 if (mode1 == VOIDmode
6559 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6560 || (modifier != EXPAND_CONST_ADDRESS
6561 && modifier != EXPAND_INITIALIZER
6562 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6563 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6564 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6565 /* If the field isn't aligned enough to fetch as a memref,
6566 fetch it as a bit field. */
6567 || (SLOW_UNALIGNED_ACCESS
6568 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6569 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6570 {
6571 enum machine_mode ext_mode = mode;
6572
6573 if (ext_mode == BLKmode)
6574 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6575
6576 if (ext_mode == BLKmode)
6577 {
6578 /* In this case, BITPOS must start at a byte boundary and
6579 TARGET, if specified, must be a MEM. */
6580 if (GET_CODE (op0) != MEM
6581 || (target != 0 && GET_CODE (target) != MEM)
6582 || bitpos % BITS_PER_UNIT != 0)
6583 abort ();
6584
6585 op0 = change_address (op0, VOIDmode,
6586 plus_constant (XEXP (op0, 0),
6587 bitpos / BITS_PER_UNIT));
6588 if (target == 0)
6589 target = assign_temp (type, 0, 1, 1);
6590
6591 emit_block_move (target, op0,
6592 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6593 / BITS_PER_UNIT),
6594 1);
6595
6596 return target;
6597 }
6598
6599 op0 = validize_mem (op0);
6600
6601 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6602 mark_reg_pointer (XEXP (op0, 0), alignment);
6603
6604 op0 = extract_bit_field (op0, bitsize, bitpos,
6605 unsignedp, target, ext_mode, ext_mode,
6606 alignment,
6607 int_size_in_bytes (TREE_TYPE (tem)));
6608
6609 /* If the result is a record type and BITSIZE is narrower than
6610 the mode of OP0, an integral mode, and this is a big endian
6611 machine, we must put the field into the high-order bits. */
6612 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6613 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6614 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6615 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6616 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6617 - bitsize),
6618 op0, 1);
6619
6620 if (mode == BLKmode)
6621 {
6622 rtx new = assign_stack_temp (ext_mode,
6623 bitsize / BITS_PER_UNIT, 0);
6624
6625 emit_move_insn (new, op0);
6626 op0 = copy_rtx (new);
6627 PUT_MODE (op0, BLKmode);
6628 MEM_SET_IN_STRUCT_P (op0, 1);
6629 }
6630
6631 return op0;
6632 }
6633
6634 /* If the result is BLKmode, use that to access the object
6635 now as well. */
6636 if (mode == BLKmode)
6637 mode1 = BLKmode;
6638
6639 /* Get a reference to just this component. */
6640 if (modifier == EXPAND_CONST_ADDRESS
6641 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6642 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6643 (bitpos / BITS_PER_UNIT)));
6644 else
6645 op0 = change_address (op0, mode1,
6646 plus_constant (XEXP (op0, 0),
6647 (bitpos / BITS_PER_UNIT)));
6648
6649 if (GET_CODE (op0) == MEM)
6650 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6651
6652 if (GET_CODE (XEXP (op0, 0)) == REG)
6653 mark_reg_pointer (XEXP (op0, 0), alignment);
6654
6655 MEM_SET_IN_STRUCT_P (op0, 1);
6656 MEM_VOLATILE_P (op0) |= volatilep;
6657 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6658 || modifier == EXPAND_CONST_ADDRESS
6659 || modifier == EXPAND_INITIALIZER)
6660 return op0;
6661 else if (target == 0)
6662 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6663
6664 convert_move (target, op0, unsignedp);
6665 return target;
6666 }
6667
6668 /* Intended for a reference to a buffer of a file-object in Pascal.
6669 But it's not certain that a special tree code will really be
6670 necessary for these. INDIRECT_REF might work for them. */
6671 case BUFFER_REF:
6672 abort ();
6673
6674 case IN_EXPR:
6675 {
6676 /* Pascal set IN expression.
6677
6678 Algorithm:
6679 rlo = set_low - (set_low%bits_per_word);
6680 the_word = set [ (index - rlo)/bits_per_word ];
6681 bit_index = index % bits_per_word;
6682 bitmask = 1 << bit_index;
6683 return !!(the_word & bitmask); */
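	/* Worked example (illustrative numbers only): with 8 bits per word
	   and set_low == 0, index 11 gives the_word = set[(11-0)/8] = set[1],
	   bit_index = 11 % 8 = 3, bitmask = 1 << 3, so the result is
	   !!(set[1] & 8).  */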
6684
6685 tree set = TREE_OPERAND (exp, 0);
6686 tree index = TREE_OPERAND (exp, 1);
6687 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6688 tree set_type = TREE_TYPE (set);
6689 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6690 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6691 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6692 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6693 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6694 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6695 rtx setaddr = XEXP (setval, 0);
6696 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6697 rtx rlow;
6698 rtx diff, quo, rem, addr, bit, result;
6699
6700 preexpand_calls (exp);
6701
6702 /* If domain is empty, answer is no. Likewise if index is constant
6703 and out of bounds. */
6704 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6705 && TREE_CODE (set_low_bound) == INTEGER_CST
6706 && tree_int_cst_lt (set_high_bound, set_low_bound))
6707 || (TREE_CODE (index) == INTEGER_CST
6708 && TREE_CODE (set_low_bound) == INTEGER_CST
6709 && tree_int_cst_lt (index, set_low_bound))
6710 || (TREE_CODE (set_high_bound) == INTEGER_CST
6711 && TREE_CODE (index) == INTEGER_CST
6712 && tree_int_cst_lt (set_high_bound, index))))
6713 return const0_rtx;
6714
6715 if (target == 0)
6716 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6717
6718 /* If we get here, we have to generate the code for both cases
6719 (in range and out of range). */
6720
6721 op0 = gen_label_rtx ();
6722 op1 = gen_label_rtx ();
6723
6724 if (! (GET_CODE (index_val) == CONST_INT
6725 && GET_CODE (lo_r) == CONST_INT))
6726 {
6727 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6728 GET_MODE (index_val), iunsignedp, 0, op1);
6729 }
6730
6731 if (! (GET_CODE (index_val) == CONST_INT
6732 && GET_CODE (hi_r) == CONST_INT))
6733 {
6734 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6735 GET_MODE (index_val), iunsignedp, 0, op1);
6736 }
6737
6738 /* Calculate the element number of bit zero in the first word
6739 of the set. */
6740 if (GET_CODE (lo_r) == CONST_INT)
6741 rlow = GEN_INT (INTVAL (lo_r)
6742 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6743 else
6744 rlow = expand_binop (index_mode, and_optab, lo_r,
6745 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6746 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6747
6748 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6749 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6750
6751 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6752 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6753 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6754 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6755
6756 addr = memory_address (byte_mode,
6757 expand_binop (index_mode, add_optab, diff,
6758 setaddr, NULL_RTX, iunsignedp,
6759 OPTAB_LIB_WIDEN));
6760
6761 	/* Extract the bit we want to examine.  */
6762 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6763 gen_rtx_MEM (byte_mode, addr),
6764 make_tree (TREE_TYPE (index), rem),
6765 NULL_RTX, 1);
6766 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6767 GET_MODE (target) == byte_mode ? target : 0,
6768 1, OPTAB_LIB_WIDEN);
6769
6770 if (result != target)
6771 convert_move (target, result, 1);
6772
6773 /* Output the code to handle the out-of-range case. */
6774 emit_jump (op0);
6775 emit_label (op1);
6776 emit_move_insn (target, const0_rtx);
6777 emit_label (op0);
6778 return target;
6779 }
6780
6781 case WITH_CLEANUP_EXPR:
6782 if (RTL_EXPR_RTL (exp) == 0)
6783 {
6784 RTL_EXPR_RTL (exp)
6785 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6786 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6787
6788 /* That's it for this cleanup. */
6789 TREE_OPERAND (exp, 2) = 0;
6790 }
6791 return RTL_EXPR_RTL (exp);
6792
6793 case CLEANUP_POINT_EXPR:
6794 {
6795 /* Start a new binding layer that will keep track of all cleanup
6796 actions to be performed. */
6797 expand_start_bindings (2);
6798
6799 target_temp_slot_level = temp_slot_level;
6800
6801 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6802 /* If we're going to use this value, load it up now. */
6803 if (! ignore)
6804 op0 = force_not_mem (op0);
6805 preserve_temp_slots (op0);
6806 expand_end_bindings (NULL_TREE, 0, 0);
6807 }
6808 return op0;
6809
6810 case CALL_EXPR:
6811 /* Check for a built-in function. */
6812 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6813 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6814 == FUNCTION_DECL)
6815 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6816 return expand_builtin (exp, target, subtarget, tmode, ignore);
6817
6818 /* If this call was expanded already by preexpand_calls,
6819 just return the result we got. */
6820 if (CALL_EXPR_RTL (exp) != 0)
6821 return CALL_EXPR_RTL (exp);
6822
6823 return expand_call (exp, target, ignore);
6824
6825 case NON_LVALUE_EXPR:
6826 case NOP_EXPR:
6827 case CONVERT_EXPR:
6828 case REFERENCE_EXPR:
6829 if (TREE_CODE (type) == UNION_TYPE)
6830 {
6831 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6832 if (target == 0)
6833 {
6834 if (mode != BLKmode)
6835 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6836 else
6837 target = assign_temp (type, 0, 1, 1);
6838 }
6839
6840 if (GET_CODE (target) == MEM)
6841 /* Store data into beginning of memory target. */
6842 store_expr (TREE_OPERAND (exp, 0),
6843 change_address (target, TYPE_MODE (valtype), 0), 0);
6844
6845 else if (GET_CODE (target) == REG)
6846 /* Store this field into a union of the proper type. */
6847 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6848 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6849 VOIDmode, 0, 1,
6850 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6851 0);
6852 else
6853 abort ();
6854
6855 /* Return the entire union. */
6856 return target;
6857 }
6858
6859 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6860 {
6861 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6862 ro_modifier);
6863
6864 /* If the signedness of the conversion differs and OP0 is
6865 a promoted SUBREG, clear that indication since we now
6866 have to do the proper extension. */
6867 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6868 && GET_CODE (op0) == SUBREG)
6869 SUBREG_PROMOTED_VAR_P (op0) = 0;
6870
6871 return op0;
6872 }
6873
6874 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6875 if (GET_MODE (op0) == mode)
6876 return op0;
6877
6878 /* If OP0 is a constant, just convert it into the proper mode. */
6879 if (CONSTANT_P (op0))
6880 return
6881 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6882 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6883
6884 if (modifier == EXPAND_INITIALIZER)
6885 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6886
6887 if (target == 0)
6888 return
6889 convert_to_mode (mode, op0,
6890 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6891 else
6892 convert_move (target, op0,
6893 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6894 return target;
6895
6896 case PLUS_EXPR:
6897 /* We come here from MINUS_EXPR when the second operand is a
6898 constant. */
6899 plus_expr:
6900 this_optab = add_optab;
6901
6902 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6903 something else, make sure we add the register to the constant and
6904 then to the other thing. This case can occur during strength
6905 reduction and doing it this way will produce better code if the
6906 frame pointer or argument pointer is eliminated.
6907
6908 fold-const.c will ensure that the constant is always in the inner
6909 PLUS_EXPR, so the only case we need to do anything about is if
6910 sp, ap, or fp is our second argument, in which case we must swap
6911 the innermost first argument and our second argument. */
6912
6913 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6914 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6915 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6916 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6917 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6918 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6919 {
6920 tree t = TREE_OPERAND (exp, 1);
6921
6922 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6923 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6924 }
6925
6926 /* If the result is to be ptr_mode and we are adding an integer to
6927 something, we might be forming a constant. So try to use
6928 plus_constant. If it produces a sum and we can't accept it,
6929 use force_operand. This allows P = &ARR[const] to generate
6930 efficient code on machines where a SYMBOL_REF is not a valid
6931 address.
6932
6933 If this is an EXPAND_SUM call, always return the sum. */
6934 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6935 || mode == ptr_mode)
6936 {
6937 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6938 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6939 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6940 {
6941 rtx constant_part;
6942
6943 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6944 EXPAND_SUM);
6945 /* Use immed_double_const to ensure that the constant is
6946 truncated according to the mode of OP1, then sign extended
6947 to a HOST_WIDE_INT. Using the constant directly can result
6948 in non-canonical RTL in a 64x32 cross compile. */
6949 constant_part
6950 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
6951 (HOST_WIDE_INT) 0,
6952 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
6953 op1 = plus_constant (op1, INTVAL (constant_part));
6954 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6955 op1 = force_operand (op1, target);
6956 return op1;
6957 }
6958
6959 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6960 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6961 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6962 {
6963 rtx constant_part;
6964
6965 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6966 EXPAND_SUM);
6967 if (! CONSTANT_P (op0))
6968 {
6969 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6970 VOIDmode, modifier);
6971 /* Don't go to both_summands if modifier
6972 says it's not right to return a PLUS. */
6973 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6974 goto binop2;
6975 goto both_summands;
6976 }
6977 /* Use immed_double_const to ensure that the constant is
 6980 	     truncated according to the mode of OP0, then sign extended
6979 to a HOST_WIDE_INT. Using the constant directly can result
6980 in non-canonical RTL in a 64x32 cross compile. */
6981 constant_part
6982 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
6983 (HOST_WIDE_INT) 0,
6984 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6985 op0 = plus_constant (op0, INTVAL (constant_part));
6986 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6987 op0 = force_operand (op0, target);
6988 return op0;
6989 }
6990 }
6991
6992 /* No sense saving up arithmetic to be done
6993 if it's all in the wrong mode to form part of an address.
6994 And force_operand won't know whether to sign-extend or
6995 zero-extend. */
6996 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6997 || mode != ptr_mode)
6998 goto binop;
6999
7000 preexpand_calls (exp);
7001 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7002 subtarget = 0;
7003
7004 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7005 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7006
7007 both_summands:
7008 /* Make sure any term that's a sum with a constant comes last. */
7009 if (GET_CODE (op0) == PLUS
7010 && CONSTANT_P (XEXP (op0, 1)))
7011 {
7012 temp = op0;
7013 op0 = op1;
7014 op1 = temp;
7015 }
7016 /* If adding to a sum including a constant,
7017 associate it to put the constant outside. */
7018 if (GET_CODE (op1) == PLUS
7019 && CONSTANT_P (XEXP (op1, 1)))
7020 {
7021 rtx constant_term = const0_rtx;
7022
7023 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7024 if (temp != 0)
7025 op0 = temp;
7026 /* Ensure that MULT comes first if there is one. */
7027 else if (GET_CODE (op0) == MULT)
7028 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7029 else
7030 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7031
7032 /* Let's also eliminate constants from op0 if possible. */
7033 op0 = eliminate_constant_term (op0, &constant_term);
7034
7035 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7036 their sum should be a constant. Form it into OP1, since the
7037 result we want will then be OP0 + OP1. */
7038
7039 temp = simplify_binary_operation (PLUS, mode, constant_term,
7040 XEXP (op1, 1));
7041 if (temp != 0)
7042 op1 = temp;
7043 else
7044 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7045 }
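       /* As a rough illustration: if OP0 is (reg R) and OP1 is
          (plus (symbol_ref X) (const_int 8)), the code above folds the
          symbol into OP0 and leaves OP1 = (const_int 8), so the constant
          ends up outermost in the final sum.  */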
7046
7047 /* Put a constant term last and put a multiplication first. */
7048 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7049 temp = op1, op1 = op0, op0 = temp;
7050
7051 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7052 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7053
7054 case MINUS_EXPR:
7055 /* For initializers, we are allowed to return a MINUS of two
7056 symbolic constants. Here we handle all cases when both operands
7057 are constant. */
7058 /* Handle difference of two symbolic constants,
7059 for the sake of an initializer. */
7060 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7061 && really_constant_p (TREE_OPERAND (exp, 0))
7062 && really_constant_p (TREE_OPERAND (exp, 1)))
7063 {
7064 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7065 VOIDmode, ro_modifier);
7066 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7067 VOIDmode, ro_modifier);
7068
7069 /* If the last operand is a CONST_INT, use plus_constant of
7070 the negated constant. Else make the MINUS. */
7071 if (GET_CODE (op1) == CONST_INT)
7072 return plus_constant (op0, - INTVAL (op1));
7073 else
7074 return gen_rtx_MINUS (mode, op0, op1);
7075 }
7076 /* Convert A - const to A + (-const). */
7077 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7078 {
7079 tree negated = fold (build1 (NEGATE_EXPR, type,
7080 TREE_OPERAND (exp, 1)));
7081
7082 /* Deal with the case where we can't negate the constant
7083 in TYPE. */
7084 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7085 {
7086 tree newtype = signed_type (type);
7087 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7088 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7089 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7090
7091 if (! TREE_OVERFLOW (newneg))
7092 return expand_expr (convert (type,
7093 build (PLUS_EXPR, newtype,
7094 newop0, newneg)),
7095 target, tmode, ro_modifier);
7096 }
7097 else
7098 {
7099 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7100 goto plus_expr;
7101 }
7102 }
7103 this_optab = sub_optab;
7104 goto binop;
7105
7106 case MULT_EXPR:
7107 preexpand_calls (exp);
7108 /* If first operand is constant, swap them.
7109 Thus the following special case checks need only
7110 check the second operand. */
7111 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7112 {
7113 register tree t1 = TREE_OPERAND (exp, 0);
7114 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7115 TREE_OPERAND (exp, 1) = t1;
7116 }
7117
7118 /* Attempt to return something suitable for generating an
7119 indexed address, for machines that support that. */
7120
7121 if (modifier == EXPAND_SUM && mode == ptr_mode
7122 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7123 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7124 {
7125 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7126 EXPAND_SUM);
7127
7128 /* Apply distributive law if OP0 is x+c. */
7129 if (GET_CODE (op0) == PLUS
7130 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7131 return
7132 gen_rtx_PLUS
7133 (mode,
7134 gen_rtx_MULT
7135 (mode, XEXP (op0, 0),
7136 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7137 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7138 * INTVAL (XEXP (op0, 1))));
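 	  /* A rough example of the rewrite above: if OP0 is
 	     (plus (reg R) (const_int 4)) and the multiplier is 8, the result
 	     is (plus (mult (reg R) (const_int 8)) (const_int 32)), which is
 	     suitable for indexed addressing.  */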
7139
7140 if (GET_CODE (op0) != REG)
7141 op0 = force_operand (op0, NULL_RTX);
7142 if (GET_CODE (op0) != REG)
7143 op0 = copy_to_mode_reg (mode, op0);
7144
7145 return
7146 gen_rtx_MULT (mode, op0,
7147 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7148 }
7149
7150 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7151 subtarget = 0;
7152
7153 /* Check for multiplying things that have been extended
7154 from a narrower type. If this machine supports multiplying
7155 in that narrower type with a result in the desired type,
7156 do it that way, and avoid the explicit type-conversion. */
7157 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7158 && TREE_CODE (type) == INTEGER_TYPE
7159 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7160 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7161 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7162 && int_fits_type_p (TREE_OPERAND (exp, 1),
7163 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7164 /* Don't use a widening multiply if a shift will do. */
7165 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7166 > HOST_BITS_PER_WIDE_INT)
7167 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7168 ||
7169 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7170 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7171 ==
7172 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7173 /* If both operands are extended, they must either both
7174 be zero-extended or both be sign-extended. */
7175 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7176 ==
7177 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7178 {
7179 enum machine_mode innermode
7180 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7181 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7182 ? smul_widen_optab : umul_widen_optab);
7183 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7184 ? umul_widen_optab : smul_widen_optab);
7185 if (mode == GET_MODE_WIDER_MODE (innermode))
7186 {
7187 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7188 {
7189 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7190 NULL_RTX, VOIDmode, 0);
7191 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7192 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7193 VOIDmode, 0);
7194 else
7195 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7196 NULL_RTX, VOIDmode, 0);
7197 goto binop2;
7198 }
7199 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7200 && innermode == word_mode)
7201 {
7202 rtx htem;
7203 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7204 NULL_RTX, VOIDmode, 0);
7205 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7206 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7207 VOIDmode, 0);
7208 else
7209 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7210 NULL_RTX, VOIDmode, 0);
7211 temp = expand_binop (mode, other_optab, op0, op1, target,
7212 unsignedp, OPTAB_LIB_WIDEN);
7213 htem = expand_mult_highpart_adjust (innermode,
7214 gen_highpart (innermode, temp),
7215 op0, op1,
7216 gen_highpart (innermode, temp),
7217 unsignedp);
7218 emit_move_insn (gen_highpart (innermode, temp), htem);
7219 return temp;
7220 }
7221 }
7222 }
7223 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7224 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7225 return expand_mult (mode, op0, op1, target, unsignedp);
7226
7227 case TRUNC_DIV_EXPR:
7228 case FLOOR_DIV_EXPR:
7229 case CEIL_DIV_EXPR:
7230 case ROUND_DIV_EXPR:
7231 case EXACT_DIV_EXPR:
7232 preexpand_calls (exp);
7233 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7234 subtarget = 0;
7235 /* Possible optimization: compute the dividend with EXPAND_SUM
 7238 	 then, if the divisor is constant, we can optimize the case
 7239 	 where some terms of the dividend have coefficients divisible by it. */
7238 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7239 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7240 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7241
7242 case RDIV_EXPR:
7243 this_optab = flodiv_optab;
7244 goto binop;
7245
7246 case TRUNC_MOD_EXPR:
7247 case FLOOR_MOD_EXPR:
7248 case CEIL_MOD_EXPR:
7249 case ROUND_MOD_EXPR:
7250 preexpand_calls (exp);
7251 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7252 subtarget = 0;
7253 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7254 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7255 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7256
7257 case FIX_ROUND_EXPR:
7258 case FIX_FLOOR_EXPR:
7259 case FIX_CEIL_EXPR:
7260 abort (); /* Not used for C. */
7261
7262 case FIX_TRUNC_EXPR:
7263 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7264 if (target == 0)
7265 target = gen_reg_rtx (mode);
7266 expand_fix (target, op0, unsignedp);
7267 return target;
7268
7269 case FLOAT_EXPR:
7270 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7271 if (target == 0)
7272 target = gen_reg_rtx (mode);
7273 /* expand_float can't figure out what to do if FROM has VOIDmode.
7274 So give it the correct mode. With -O, cse will optimize this. */
7275 if (GET_MODE (op0) == VOIDmode)
7276 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7277 op0);
7278 expand_float (target, op0,
7279 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7280 return target;
7281
7282 case NEGATE_EXPR:
7283 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7284 temp = expand_unop (mode, neg_optab, op0, target, 0);
7285 if (temp == 0)
7286 abort ();
7287 return temp;
7288
7289 case ABS_EXPR:
7290 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7291
7292 /* Handle complex values specially. */
7293 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7294 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7295 return expand_complex_abs (mode, op0, target, unsignedp);
7296
7297 /* Unsigned abs is simply the operand. Testing here means we don't
7298 risk generating incorrect code below. */
7299 if (TREE_UNSIGNED (type))
7300 return op0;
7301
7302 return expand_abs (mode, op0, target,
7303 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7304
7305 case MAX_EXPR:
7306 case MIN_EXPR:
7307 target = original_target;
7308 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7309 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7310 || GET_MODE (target) != mode
7311 || (GET_CODE (target) == REG
7312 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7313 target = gen_reg_rtx (mode);
7314 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7315 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7316
7317 /* First try to do it with a special MIN or MAX instruction.
7318 If that does not win, use a conditional jump to select the proper
7319 value. */
7320 this_optab = (TREE_UNSIGNED (type)
7321 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7322 : (code == MIN_EXPR ? smin_optab : smax_optab));
7323
7324 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7325 OPTAB_WIDEN);
7326 if (temp != 0)
7327 return temp;
7328
7329 /* At this point, a MEM target is no longer useful; we will get better
7330 code without it. */
7331
7332 if (GET_CODE (target) == MEM)
7333 target = gen_reg_rtx (mode);
7334
7335 if (target != op0)
7336 emit_move_insn (target, op0);
7337
7338 op0 = gen_label_rtx ();
7339
7340 /* If this mode is an integer too wide to compare properly,
7341 compare word by word. Rely on cse to optimize constant cases. */
7342 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode))
7343 {
7344 if (code == MAX_EXPR)
7345 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7346 target, op1, NULL_RTX, op0);
7347 else
7348 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7349 op1, target, NULL_RTX, op0);
7350 }
7351 else
7352 {
7353 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7354 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7355 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7356 op0);
7357 }
7358 emit_move_insn (target, op1);
7359 emit_label (op0);
7360 return target;
7361
7362 case BIT_NOT_EXPR:
7363 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7364 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7365 if (temp == 0)
7366 abort ();
7367 return temp;
7368
7369 case FFS_EXPR:
7370 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7371 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7372 if (temp == 0)
7373 abort ();
7374 return temp;
7375
7376 /* ??? Can optimize bitwise operations with one arg constant.
7377 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7378 and (a bitwise1 b) bitwise2 b (etc)
7379 but that is probably not worth while. */
7380
7381 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7382 boolean values when we want in all cases to compute both of them. In
7383 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7384 as actual zero-or-1 values and then bitwise anding. In cases where
7385 there cannot be any side effects, better code would be made by
7386 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7387 how to recognize those cases. */
7388
7389 case TRUTH_AND_EXPR:
7390 case BIT_AND_EXPR:
7391 this_optab = and_optab;
7392 goto binop;
7393
7394 case TRUTH_OR_EXPR:
7395 case BIT_IOR_EXPR:
7396 this_optab = ior_optab;
7397 goto binop;
7398
7399 case TRUTH_XOR_EXPR:
7400 case BIT_XOR_EXPR:
7401 this_optab = xor_optab;
7402 goto binop;
7403
7404 case LSHIFT_EXPR:
7405 case RSHIFT_EXPR:
7406 case LROTATE_EXPR:
7407 case RROTATE_EXPR:
7408 preexpand_calls (exp);
7409 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7410 subtarget = 0;
7411 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7412 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7413 unsignedp);
7414
7415 /* Could determine the answer when only additive constants differ. Also,
7416 the addition of one can be handled by changing the condition. */
7417 case LT_EXPR:
7418 case LE_EXPR:
7419 case GT_EXPR:
7420 case GE_EXPR:
7421 case EQ_EXPR:
7422 case NE_EXPR:
7423 preexpand_calls (exp);
7424 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7425 if (temp != 0)
7426 return temp;
7427
7428 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7429 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7430 && original_target
7431 && GET_CODE (original_target) == REG
7432 && (GET_MODE (original_target)
7433 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7434 {
7435 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7436 VOIDmode, 0);
7437
7438 if (temp != original_target)
7439 temp = copy_to_reg (temp);
7440
7441 op1 = gen_label_rtx ();
7442 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7443 GET_MODE (temp), unsignedp, 0, op1);
7444 emit_move_insn (temp, const1_rtx);
7445 emit_label (op1);
7446 return temp;
7447 }
7448
7449 /* If no set-flag instruction, must generate a conditional
7450 store into a temporary variable. Drop through
7451 and handle this like && and ||. */
7452
7453 case TRUTH_ANDIF_EXPR:
7454 case TRUTH_ORIF_EXPR:
7455 if (! ignore
7456 && (target == 0 || ! safe_from_p (target, exp, 1)
7457 /* Make sure we don't have a hard reg (such as function's return
7458 value) live across basic blocks, if not optimizing. */
7459 || (!optimize && GET_CODE (target) == REG
7460 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7461 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7462
7463 if (target)
7464 emit_clr_insn (target);
7465
7466 op1 = gen_label_rtx ();
7467 jumpifnot (exp, op1);
7468
7469 if (target)
7470 emit_0_to_1_insn (target);
7471
7472 emit_label (op1);
7473 return ignore ? const0_rtx : target;
7474
7475 case TRUTH_NOT_EXPR:
7476 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7477 /* The parser is careful to generate TRUTH_NOT_EXPR
7478 only with operands that are always zero or one. */
7479 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7480 target, 1, OPTAB_LIB_WIDEN);
7481 if (temp == 0)
7482 abort ();
7483 return temp;
7484
7485 case COMPOUND_EXPR:
7486 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7487 emit_queue ();
7488 return expand_expr (TREE_OPERAND (exp, 1),
7489 (ignore ? const0_rtx : target),
7490 VOIDmode, 0);
7491
7492 case COND_EXPR:
7493 /* If we would have a "singleton" (see below) were it not for a
7494 conversion in each arm, bring that conversion back out. */
7495 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7496 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7497 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7498 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7499 {
7500 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7501 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7502
7503 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7504 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7505 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7506 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7507 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7508 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7509 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7510 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7511 return expand_expr (build1 (NOP_EXPR, type,
7512 build (COND_EXPR, TREE_TYPE (true),
7513 TREE_OPERAND (exp, 0),
7514 true, false)),
7515 target, tmode, modifier);
7516 }
7517
7518 {
7519 /* Note that COND_EXPRs whose type is a structure or union
7520 are required to be constructed to contain assignments of
7521 a temporary variable, so that we can evaluate them here
7522 for side effect only. If type is void, we must do likewise. */
7523
7524 /* If an arm of the branch requires a cleanup,
7525 only that cleanup is performed. */
7526
7527 tree singleton = 0;
7528 tree binary_op = 0, unary_op = 0;
7529
7530 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7531 convert it to our mode, if necessary. */
7532 if (integer_onep (TREE_OPERAND (exp, 1))
7533 && integer_zerop (TREE_OPERAND (exp, 2))
7534 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7535 {
7536 if (ignore)
7537 {
7538 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7539 ro_modifier);
7540 return const0_rtx;
7541 }
7542
7543 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7544 if (GET_MODE (op0) == mode)
7545 return op0;
7546
7547 if (target == 0)
7548 target = gen_reg_rtx (mode);
7549 convert_move (target, op0, unsignedp);
7550 return target;
7551 }
7552
7553 /* Check for X ? A + B : A. If we have this, we can copy A to the
7554 output and conditionally add B. Similarly for unary operations.
7555 Don't do this if X has side-effects because those side effects
7556 might affect A or B and the "?" operation is a sequence point in
7557 ANSI. (operand_equal_p tests for side effects.) */
7558
7559 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7560 && operand_equal_p (TREE_OPERAND (exp, 2),
7561 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7562 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7563 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7564 && operand_equal_p (TREE_OPERAND (exp, 1),
7565 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7566 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7567 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7568 && operand_equal_p (TREE_OPERAND (exp, 2),
7569 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7570 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7571 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7572 && operand_equal_p (TREE_OPERAND (exp, 1),
7573 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7574 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
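 	/* For instance, in  x ? a + b : a  the singleton is A and BINARY_OP
 	   is A + B; in  x ? -a : a  the singleton is A and UNARY_OP is -A.  */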
7575
7576 /* If we are not to produce a result, we have no target. Otherwise,
7577 if a target was specified use it; it will not be used as an
7578 intermediate target unless it is safe. If no target, use a
7579 temporary. */
7580
7581 if (ignore)
7582 temp = 0;
7583 else if (original_target
7584 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7585 || (singleton && GET_CODE (original_target) == REG
7586 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7587 && original_target == var_rtx (singleton)))
7588 && GET_MODE (original_target) == mode
7589 #ifdef HAVE_conditional_move
7590 && (! can_conditionally_move_p (mode)
7591 || GET_CODE (original_target) == REG
7592 || TREE_ADDRESSABLE (type))
7593 #endif
7594 && ! (GET_CODE (original_target) == MEM
7595 && MEM_VOLATILE_P (original_target)))
7596 temp = original_target;
7597 else if (TREE_ADDRESSABLE (type))
7598 abort ();
7599 else
7600 temp = assign_temp (type, 0, 0, 1);
7601
7602 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7603 do the test of X as a store-flag operation, do this as
7604 A + ((X != 0) << log C). Similarly for other simple binary
7605 operators. Only do for C == 1 if BRANCH_COST is low. */
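 	/* E.g.  x > 0 ? a + 4 : a  can become, in effect,
 	   a + ((x > 0) << 2), avoiding a branch when do_store_flag
 	   succeeds.  */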
7606 if (temp && singleton && binary_op
7607 && (TREE_CODE (binary_op) == PLUS_EXPR
7608 || TREE_CODE (binary_op) == MINUS_EXPR
7609 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7610 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7611 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7612 : integer_onep (TREE_OPERAND (binary_op, 1)))
7613 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7614 {
7615 rtx result;
7616 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7617 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7618 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7619 : xor_optab);
7620
7621 /* If we had X ? A : A + 1, do this as A + (X == 0).
7622
7623 We have to invert the truth value here and then put it
7624 back later if do_store_flag fails. We cannot simply copy
7625 TREE_OPERAND (exp, 0) to another variable and modify that
7626 because invert_truthvalue can modify the tree pointed to
7627 by its argument. */
7628 if (singleton == TREE_OPERAND (exp, 1))
7629 TREE_OPERAND (exp, 0)
7630 = invert_truthvalue (TREE_OPERAND (exp, 0));
7631
7632 result = do_store_flag (TREE_OPERAND (exp, 0),
7633 (safe_from_p (temp, singleton, 1)
7634 ? temp : NULL_RTX),
7635 mode, BRANCH_COST <= 1);
7636
7637 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7638 result = expand_shift (LSHIFT_EXPR, mode, result,
7639 build_int_2 (tree_log2
7640 (TREE_OPERAND
7641 (binary_op, 1)),
7642 0),
7643 (safe_from_p (temp, singleton, 1)
7644 ? temp : NULL_RTX), 0);
7645
7646 if (result)
7647 {
7648 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7649 return expand_binop (mode, boptab, op1, result, temp,
7650 unsignedp, OPTAB_LIB_WIDEN);
7651 }
7652 else if (singleton == TREE_OPERAND (exp, 1))
7653 TREE_OPERAND (exp, 0)
7654 = invert_truthvalue (TREE_OPERAND (exp, 0));
7655 }
7656
7657 do_pending_stack_adjust ();
7658 NO_DEFER_POP;
7659 op0 = gen_label_rtx ();
7660
7661 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7662 {
7663 if (temp != 0)
7664 {
7665 /* If the target conflicts with the other operand of the
7666 binary op, we can't use it. Also, we can't use the target
7667 if it is a hard register, because evaluating the condition
7668 might clobber it. */
7669 if ((binary_op
7670 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7671 || (GET_CODE (temp) == REG
7672 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7673 temp = gen_reg_rtx (mode);
7674 store_expr (singleton, temp, 0);
7675 }
7676 else
7677 expand_expr (singleton,
7678 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7679 if (singleton == TREE_OPERAND (exp, 1))
7680 jumpif (TREE_OPERAND (exp, 0), op0);
7681 else
7682 jumpifnot (TREE_OPERAND (exp, 0), op0);
7683
7684 start_cleanup_deferral ();
7685 if (binary_op && temp == 0)
7686 /* Just touch the other operand. */
7687 expand_expr (TREE_OPERAND (binary_op, 1),
7688 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7689 else if (binary_op)
7690 store_expr (build (TREE_CODE (binary_op), type,
7691 make_tree (type, temp),
7692 TREE_OPERAND (binary_op, 1)),
7693 temp, 0);
7694 else
7695 store_expr (build1 (TREE_CODE (unary_op), type,
7696 make_tree (type, temp)),
7697 temp, 0);
7698 op1 = op0;
7699 }
7700 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7701 comparison operator. If we have one of these cases, set the
7702 output to A, branch on A (cse will merge these two references),
7703 then set the output to FOO. */
7704 else if (temp
7705 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7706 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7707 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7708 TREE_OPERAND (exp, 1), 0)
7709 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7710 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7711 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7712 {
7713 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7714 temp = gen_reg_rtx (mode);
7715 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7716 jumpif (TREE_OPERAND (exp, 0), op0);
7717
7718 start_cleanup_deferral ();
7719 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7720 op1 = op0;
7721 }
7722 else if (temp
7723 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7724 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7725 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7726 TREE_OPERAND (exp, 2), 0)
7727 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7728 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7729 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7730 {
7731 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7732 temp = gen_reg_rtx (mode);
7733 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7734 jumpifnot (TREE_OPERAND (exp, 0), op0);
7735
7736 start_cleanup_deferral ();
7737 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7738 op1 = op0;
7739 }
7740 else
7741 {
7742 op1 = gen_label_rtx ();
7743 jumpifnot (TREE_OPERAND (exp, 0), op0);
7744
7745 start_cleanup_deferral ();
7746
7747 /* One branch of the cond can be void, if it never returns. For
7748 example A ? throw : E */
7749 if (temp != 0
7750 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7751 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7752 else
7753 expand_expr (TREE_OPERAND (exp, 1),
7754 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7755 end_cleanup_deferral ();
7756 emit_queue ();
7757 emit_jump_insn (gen_jump (op1));
7758 emit_barrier ();
7759 emit_label (op0);
7760 start_cleanup_deferral ();
7761 if (temp != 0
7762 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7763 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7764 else
7765 expand_expr (TREE_OPERAND (exp, 2),
7766 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7767 }
7768
7769 end_cleanup_deferral ();
7770
7771 emit_queue ();
7772 emit_label (op1);
7773 OK_DEFER_POP;
7774
7775 return temp;
7776 }
7777
7778 case TARGET_EXPR:
7779 {
7780 /* Something needs to be initialized, but we didn't know
7781 where that thing was when building the tree. For example,
7782 it could be the return value of a function, or a parameter
 7785 	 to a function which is laid down in the stack, or a temporary
7784 variable which must be passed by reference.
7785
7786 We guarantee that the expression will either be constructed
7787 or copied into our original target. */
7788
7789 tree slot = TREE_OPERAND (exp, 0);
7790 tree cleanups = NULL_TREE;
7791 tree exp1;
7792
7793 if (TREE_CODE (slot) != VAR_DECL)
7794 abort ();
7795
7796 if (! ignore)
7797 target = original_target;
7798
7799 if (target == 0)
7800 {
7801 if (DECL_RTL (slot) != 0)
7802 {
7803 target = DECL_RTL (slot);
 7806 	    /* If we have already expanded the slot, don't do
7805 it again. (mrs) */
7806 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7807 return target;
7808 }
7809 else
7810 {
7811 target = assign_temp (type, 2, 0, 1);
7812 /* All temp slots at this level must not conflict. */
7813 preserve_temp_slots (target);
7814 DECL_RTL (slot) = target;
7815 if (TREE_ADDRESSABLE (slot))
7816 {
7817 TREE_ADDRESSABLE (slot) = 0;
7818 mark_addressable (slot);
7819 }
7820
7821 /* Since SLOT is not known to the called function
7822 to belong to its stack frame, we must build an explicit
7823 cleanup. This case occurs when we must build up a reference
7824 to pass the reference as an argument. In this case,
7825 it is very likely that such a reference need not be
7826 built here. */
7827
7828 if (TREE_OPERAND (exp, 2) == 0)
7829 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7830 cleanups = TREE_OPERAND (exp, 2);
7831 }
7832 }
7833 else
7834 {
 7837 	  /* This case does occur when expanding a parameter which
7836 needs to be constructed on the stack. The target
7837 is the actual stack address that we want to initialize.
7838 The function we call will perform the cleanup in this case. */
7839
7840 /* If we have already assigned it space, use that space,
 7843 	     not the target that we were passed in, as our target
7842 parameter is only a hint. */
7843 if (DECL_RTL (slot) != 0)
7844 {
7845 target = DECL_RTL (slot);
 7848 	      /* If we have already expanded the slot, don't do
7847 it again. (mrs) */
7848 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7849 return target;
7850 }
7851 else
7852 {
7853 DECL_RTL (slot) = target;
7854 /* If we must have an addressable slot, then make sure that
7855 the RTL that we just stored in slot is OK. */
7856 if (TREE_ADDRESSABLE (slot))
7857 {
7858 TREE_ADDRESSABLE (slot) = 0;
7859 mark_addressable (slot);
7860 }
7861 }
7862 }
7863
7864 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7865 /* Mark it as expanded. */
7866 TREE_OPERAND (exp, 1) = NULL_TREE;
7867
7868 TREE_USED (slot) = 1;
7869 store_expr (exp1, target, 0);
7870
7871 expand_decl_cleanup (NULL_TREE, cleanups);
7872
7873 return target;
7874 }
7875
7876 case INIT_EXPR:
7877 {
7878 tree lhs = TREE_OPERAND (exp, 0);
7879 tree rhs = TREE_OPERAND (exp, 1);
7880 tree noncopied_parts = 0;
7881 tree lhs_type = TREE_TYPE (lhs);
7882
7883 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7884 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7885 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7886 TYPE_NONCOPIED_PARTS (lhs_type));
7887 while (noncopied_parts != 0)
7888 {
7889 expand_assignment (TREE_VALUE (noncopied_parts),
7890 TREE_PURPOSE (noncopied_parts), 0, 0);
7891 noncopied_parts = TREE_CHAIN (noncopied_parts);
7892 }
7893 return temp;
7894 }
7895
7896 case MODIFY_EXPR:
7897 {
7898 /* If lhs is complex, expand calls in rhs before computing it.
7899 That's so we don't compute a pointer and save it over a call.
7900 If lhs is simple, compute it first so we can give it as a
7901 target if the rhs is just a call. This avoids an extra temp and copy
 7904 	 and prevents a partial subsumption that makes bad code.
7903 Actually we could treat component_ref's of vars like vars. */
7904
7905 tree lhs = TREE_OPERAND (exp, 0);
7906 tree rhs = TREE_OPERAND (exp, 1);
7907 tree noncopied_parts = 0;
7908 tree lhs_type = TREE_TYPE (lhs);
7909
7910 temp = 0;
7911
7912 if (TREE_CODE (lhs) != VAR_DECL
7913 && TREE_CODE (lhs) != RESULT_DECL
7914 && TREE_CODE (lhs) != PARM_DECL
7915 && ! (TREE_CODE (lhs) == INDIRECT_REF
7916 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7917 preexpand_calls (exp);
7918
7919 /* Check for |= or &= of a bitfield of size one into another bitfield
7920 of size 1. In this case, (unless we need the result of the
7921 assignment) we can do this more efficiently with a
7922 test followed by an assignment, if necessary.
7923
7924 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7925 things change so we do, this code should be enhanced to
7926 support it. */
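 	/* For example, with one-bit fields,  a.x |= b.y  is expanded
 	   roughly as  if (b.y) a.x = 1;  and  a.x &= b.y  roughly as
 	   if (! b.y) a.x = 0;  so only a test and a conditional store
 	   are emitted.  */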
7927 if (ignore
7928 && TREE_CODE (lhs) == COMPONENT_REF
7929 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7930 || TREE_CODE (rhs) == BIT_AND_EXPR)
7931 && TREE_OPERAND (rhs, 0) == lhs
7932 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7933 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7934 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7935 {
7936 rtx label = gen_label_rtx ();
7937
7938 do_jump (TREE_OPERAND (rhs, 1),
7939 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7940 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7941 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7942 (TREE_CODE (rhs) == BIT_IOR_EXPR
7943 ? integer_one_node
7944 : integer_zero_node)),
7945 0, 0);
7946 do_pending_stack_adjust ();
7947 emit_label (label);
7948 return const0_rtx;
7949 }
7950
7951 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7952 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7953 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7954 TYPE_NONCOPIED_PARTS (lhs_type));
7955
7956 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7957 while (noncopied_parts != 0)
7958 {
7959 expand_assignment (TREE_PURPOSE (noncopied_parts),
7960 TREE_VALUE (noncopied_parts), 0, 0);
7961 noncopied_parts = TREE_CHAIN (noncopied_parts);
7962 }
7963 return temp;
7964 }
7965
7966 case RETURN_EXPR:
7967 if (!TREE_OPERAND (exp, 0))
7968 expand_null_return ();
7969 else
7970 expand_return (TREE_OPERAND (exp, 0));
7971 return const0_rtx;
7972
7973 case PREINCREMENT_EXPR:
7974 case PREDECREMENT_EXPR:
7975 return expand_increment (exp, 0, ignore);
7976
7977 case POSTINCREMENT_EXPR:
7978 case POSTDECREMENT_EXPR:
7979 /* Faster to treat as pre-increment if result is not used. */
7980 return expand_increment (exp, ! ignore, ignore);
7981
7982 case ADDR_EXPR:
7983 /* If nonzero, TEMP will be set to the address of something that might
7984 be a MEM corresponding to a stack slot. */
7985 temp = 0;
7986
7987 /* Are we taking the address of a nested function? */
7988 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7989 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7990 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7991 && ! TREE_STATIC (exp))
7992 {
7993 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7994 op0 = force_operand (op0, target);
7995 }
7996 /* If we are taking the address of something erroneous, just
7997 return a zero. */
7998 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7999 return const0_rtx;
8000 else
8001 {
8002 /* We make sure to pass const0_rtx down if we came in with
8003 ignore set, to avoid doing the cleanups twice for something. */
8004 op0 = expand_expr (TREE_OPERAND (exp, 0),
8005 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8006 (modifier == EXPAND_INITIALIZER
8007 ? modifier : EXPAND_CONST_ADDRESS));
8008
8009 /* If we are going to ignore the result, OP0 will have been set
8010 to const0_rtx, so just return it. Don't get confused and
8011 think we are taking the address of the constant. */
8012 if (ignore)
8013 return op0;
8014
8015 op0 = protect_from_queue (op0, 0);
8016
8017 /* We would like the object in memory. If it is a constant, we can
8018 have it be statically allocated into memory. For a non-constant,
8019 we need to allocate some memory and store the value into it. */
8020
8021 if (CONSTANT_P (op0))
8022 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8023 op0);
8024 else if (GET_CODE (op0) == MEM)
8025 {
8026 mark_temp_addr_taken (op0);
8027 temp = XEXP (op0, 0);
8028 }
8029
8030 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8031 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8032 {
 8035 	    /* If this object is in a register, it must not
8034 be BLKmode. */
8035 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8036 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8037
8038 mark_temp_addr_taken (memloc);
8039 emit_move_insn (memloc, op0);
8040 op0 = memloc;
8041 }
8042
8043 if (GET_CODE (op0) != MEM)
8044 abort ();
8045
8046 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8047 {
8048 temp = XEXP (op0, 0);
8049 #ifdef POINTERS_EXTEND_UNSIGNED
8050 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8051 && mode == ptr_mode)
8052 temp = convert_memory_address (ptr_mode, temp);
8053 #endif
8054 return temp;
8055 }
8056
8057 op0 = force_operand (XEXP (op0, 0), target);
8058 }
8059
8060 if (flag_force_addr && GET_CODE (op0) != REG)
8061 op0 = force_reg (Pmode, op0);
8062
8063 if (GET_CODE (op0) == REG
8064 && ! REG_USERVAR_P (op0))
8065 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8066
8067 /* If we might have had a temp slot, add an equivalent address
8068 for it. */
8069 if (temp != 0)
8070 update_temp_slot_address (temp, op0);
8071
8072 #ifdef POINTERS_EXTEND_UNSIGNED
8073 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8074 && mode == ptr_mode)
8075 op0 = convert_memory_address (ptr_mode, op0);
8076 #endif
8077
8078 return op0;
8079
8080 case ENTRY_VALUE_EXPR:
8081 abort ();
8082
8083 /* COMPLEX type for Extended Pascal & Fortran */
8084 case COMPLEX_EXPR:
8085 {
8086 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8087 rtx insns;
8088
8089 /* Get the rtx code of the operands. */
8090 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8091 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8092
8093 if (! target)
8094 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8095
8096 start_sequence ();
8097
8098 /* Move the real (op0) and imaginary (op1) parts to their location. */
8099 emit_move_insn (gen_realpart (mode, target), op0);
8100 emit_move_insn (gen_imagpart (mode, target), op1);
8101
8102 insns = get_insns ();
8103 end_sequence ();
8104
8105 /* Complex construction should appear as a single unit. */
8106 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8107 each with a separate pseudo as destination.
8108 It's not correct for flow to treat them as a unit. */
8109 if (GET_CODE (target) != CONCAT)
8110 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8111 else
8112 emit_insns (insns);
8113
8114 return target;
8115 }
8116
8117 case REALPART_EXPR:
8118 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8119 return gen_realpart (mode, op0);
8120
8121 case IMAGPART_EXPR:
8122 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8123 return gen_imagpart (mode, op0);
8124
8125 case CONJ_EXPR:
8126 {
8127 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8128 rtx imag_t;
8129 rtx insns;
8130
8131 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8132
8133 if (! target)
8134 target = gen_reg_rtx (mode);
8135
8136 start_sequence ();
8137
8138 /* Store the realpart and the negated imagpart to target. */
8139 emit_move_insn (gen_realpart (partmode, target),
8140 gen_realpart (partmode, op0));
8141
8142 imag_t = gen_imagpart (partmode, target);
8143 temp = expand_unop (partmode, neg_optab,
8144 gen_imagpart (partmode, op0), imag_t, 0);
8145 if (temp != imag_t)
8146 emit_move_insn (imag_t, temp);
8147
8148 insns = get_insns ();
8149 end_sequence ();
8150
 8153     /* Conjugate should appear as a single unit.
8152 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8153 each with a separate pseudo as destination.
8154 It's not correct for flow to treat them as a unit. */
8155 if (GET_CODE (target) != CONCAT)
8156 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8157 else
8158 emit_insns (insns);
8159
8160 return target;
8161 }
8162
8163 case TRY_CATCH_EXPR:
8164 {
8165 tree handler = TREE_OPERAND (exp, 1);
8166
8167 expand_eh_region_start ();
8168
8169 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8170
8171 expand_eh_region_end (handler);
8172
8173 return op0;
8174 }
8175
8176 case TRY_FINALLY_EXPR:
8177 {
8178 tree try_block = TREE_OPERAND (exp, 0);
8179 tree finally_block = TREE_OPERAND (exp, 1);
8180 rtx finally_label = gen_label_rtx ();
8181 rtx done_label = gen_label_rtx ();
8182 rtx return_link = gen_reg_rtx (Pmode);
8183 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8184 (tree) finally_label, (tree) return_link);
8185 TREE_SIDE_EFFECTS (cleanup) = 1;
8186
8187 /* Start a new binding layer that will keep track of all cleanup
8188 actions to be performed. */
8189 expand_start_bindings (2);
8190
8191 target_temp_slot_level = temp_slot_level;
8192
8193 expand_decl_cleanup (NULL_TREE, cleanup);
8194 op0 = expand_expr (try_block, target, tmode, modifier);
8195
8196 preserve_temp_slots (op0);
8197 expand_end_bindings (NULL_TREE, 0, 0);
8198 emit_jump (done_label);
8199 emit_label (finally_label);
8200 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8201 emit_indirect_jump (return_link);
8202 emit_label (done_label);
8203 return op0;
8204 }
8205
8206 case GOTO_SUBROUTINE_EXPR:
8207 {
8208 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8209 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8210 rtx return_address = gen_label_rtx ();
8211 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8212 emit_jump (subr);
8213 emit_label (return_address);
8214 return const0_rtx;
8215 }
8216
8217 case POPDCC_EXPR:
8218 {
8219 rtx dcc = get_dynamic_cleanup_chain ();
8220 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8221 return const0_rtx;
8222 }
8223
8224 case POPDHC_EXPR:
8225 {
8226 rtx dhc = get_dynamic_handler_chain ();
8227 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8228 return const0_rtx;
8229 }
8230
8231 case VA_ARG_EXPR:
8232 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8233
8234 default:
8235 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8236 }
8237
8238 /* Here to do an ordinary binary operator, generating an instruction
8239 from the optab already placed in `this_optab'. */
8240 binop:
8241 preexpand_calls (exp);
8242 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8243 subtarget = 0;
8244 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8245 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8246 binop2:
8247 temp = expand_binop (mode, this_optab, op0, op1, target,
8248 unsignedp, OPTAB_LIB_WIDEN);
8249 if (temp == 0)
8250 abort ();
8251 return temp;
8252 }
8253 \f
8254 /* Return the tree node and offset if a given argument corresponds to
8255 a string constant. */
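/* For example, for the argument  "hello" + 3  this returns the STRING_CST
   for "hello" and sets *PTR_OFFSET to 3; for a plain  "hello"  the offset
   returned is integer_zero_node.  */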
8256
8257 tree
8258 string_constant (arg, ptr_offset)
8259 tree arg;
8260 tree *ptr_offset;
8261 {
8262 STRIP_NOPS (arg);
8263
8264 if (TREE_CODE (arg) == ADDR_EXPR
8265 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8266 {
8267 *ptr_offset = integer_zero_node;
8268 return TREE_OPERAND (arg, 0);
8269 }
8270 else if (TREE_CODE (arg) == PLUS_EXPR)
8271 {
8272 tree arg0 = TREE_OPERAND (arg, 0);
8273 tree arg1 = TREE_OPERAND (arg, 1);
8274
8275 STRIP_NOPS (arg0);
8276 STRIP_NOPS (arg1);
8277
8278 if (TREE_CODE (arg0) == ADDR_EXPR
8279 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8280 {
8281 *ptr_offset = arg1;
8282 return TREE_OPERAND (arg0, 0);
8283 }
8284 else if (TREE_CODE (arg1) == ADDR_EXPR
8285 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8286 {
8287 *ptr_offset = arg0;
8288 return TREE_OPERAND (arg1, 0);
8289 }
8290 }
8291
8292 return 0;
8293 }
8294 \f
8295 /* Expand code for a post- or pre- increment or decrement
8296 and return the RTX for the result.
8297 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8298
8299 static rtx
8300 expand_increment (exp, post, ignore)
8301 register tree exp;
8302 int post, ignore;
8303 {
8304 register rtx op0, op1;
8305 register rtx temp, value;
8306 register tree incremented = TREE_OPERAND (exp, 0);
8307 optab this_optab = add_optab;
8308 int icode;
8309 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8310 int op0_is_copy = 0;
8311 int single_insn = 0;
8312 /* 1 means we can't store into OP0 directly,
8313 because it is a subreg narrower than a word,
8314 and we don't dare clobber the rest of the word. */
8315 int bad_subreg = 0;
8316
8317 /* Stabilize any component ref that might need to be
8318 evaluated more than once below. */
8319 if (!post
8320 || TREE_CODE (incremented) == BIT_FIELD_REF
8321 || (TREE_CODE (incremented) == COMPONENT_REF
8322 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8323 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8324 incremented = stabilize_reference (incremented);
8325 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8326 ones into save exprs so that they don't accidentally get evaluated
 8329 	 more than once by the code below.  */
8328 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8329 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8330 incremented = save_expr (incremented);
8331
8332 /* Compute the operands as RTX.
8333 Note whether OP0 is the actual lvalue or a copy of it:
8334 I believe it is a copy iff it is a register or subreg
8335 and insns were generated in computing it. */
8336
8337 temp = get_last_insn ();
8338 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8339
8340 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8341 in place but instead must do sign- or zero-extension during assignment,
8342 so we copy it into a new register and let the code below use it as
8343 a copy.
8344
 8347 	 Note that we can safely modify this SUBREG since it is known not to be
8346 shared (it was made by the expand_expr call above). */
8347
8348 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8349 {
8350 if (post)
8351 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8352 else
8353 bad_subreg = 1;
8354 }
8355 else if (GET_CODE (op0) == SUBREG
8356 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8357 {
8358 /* We cannot increment this SUBREG in place. If we are
8359 post-incrementing, get a copy of the old value. Otherwise,
8360 just mark that we cannot increment in place. */
8361 if (post)
8362 op0 = copy_to_reg (op0);
8363 else
8364 bad_subreg = 1;
8365 }
8366
8367 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8368 && temp != get_last_insn ());
8369 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8370 EXPAND_MEMORY_USE_BAD);
8371
8372 /* Decide whether incrementing or decrementing. */
8373 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8374 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8375 this_optab = sub_optab;
8376
8377 /* Convert decrement by a constant into a negative increment. */
8378 if (this_optab == sub_optab
8379 && GET_CODE (op1) == CONST_INT)
8380 {
8381 op1 = GEN_INT (- INTVAL (op1));
8382 this_optab = add_optab;
8383 }
8384
8385 /* For a preincrement, see if we can do this with a single instruction. */
8386 if (!post)
8387 {
8388 icode = (int) this_optab->handlers[(int) mode].insn_code;
8389 if (icode != (int) CODE_FOR_nothing
8390 /* Make sure that OP0 is valid for operands 0 and 1
8391 of the insn we want to queue. */
8392 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8393 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8394 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8395 single_insn = 1;
8396 }
8397
8398 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8399 then we cannot just increment OP0. We must therefore contrive to
8400 increment the original value. Then, for postincrement, we can return
8401 OP0 since it is a copy of the old value. For preincrement, expand here
8402 unless we can do it with a single insn.
8403
8404 Likewise if storing directly into OP0 would clobber high bits
8405 we need to preserve (bad_subreg). */
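   /* For example, a post-increment  v++  whose OP0 is only a copy is
      handled by building the tree  V = V + 1 , expanding that assignment,
      and returning the copy in OP0 as the (old) value of the expression.  */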
8406 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8407 {
8408 /* This is the easiest way to increment the value wherever it is.
8409 Problems with multiple evaluation of INCREMENTED are prevented
8410 because either (1) it is a component_ref or preincrement,
8411 in which case it was stabilized above, or (2) it is an array_ref
8412 with constant index in an array in a register, which is
8413 safe to reevaluate. */
8414 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8415 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8416 ? MINUS_EXPR : PLUS_EXPR),
8417 TREE_TYPE (exp),
8418 incremented,
8419 TREE_OPERAND (exp, 1));
8420
8421 while (TREE_CODE (incremented) == NOP_EXPR
8422 || TREE_CODE (incremented) == CONVERT_EXPR)
8423 {
8424 newexp = convert (TREE_TYPE (incremented), newexp);
8425 incremented = TREE_OPERAND (incremented, 0);
8426 }
8427
 8430 	  temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8429 return post ? op0 : temp;
8430 }
8431
8432 if (post)
8433 {
8434 /* We have a true reference to the value in OP0.
8435 If there is an insn to add or subtract in this mode, queue it.
8436 Queueing the increment insn avoids the register shuffling
8437 that often results if we must increment now and first save
8438 the old value for subsequent use. */
8439
8440 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8441 op0 = stabilize (op0);
8442 #endif
8443
8444 icode = (int) this_optab->handlers[(int) mode].insn_code;
8445 if (icode != (int) CODE_FOR_nothing
8446 /* Make sure that OP0 is valid for operands 0 and 1
8447 of the insn we want to queue. */
8448 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8449 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8450 {
8451 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8452 op1 = force_reg (mode, op1);
8453
8454 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8455 }
8456 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8457 {
8458 rtx addr = (general_operand (XEXP (op0, 0), mode)
8459 ? force_reg (Pmode, XEXP (op0, 0))
8460 : copy_to_reg (XEXP (op0, 0)));
8461 rtx temp, result;
8462
8463 op0 = change_address (op0, VOIDmode, addr);
8464 temp = force_reg (GET_MODE (op0), op0);
8465 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8466 op1 = force_reg (mode, op1);
8467
8468 /* The increment queue is LIFO, thus we have to `queue'
8469 the instructions in reverse order. */
8470 enqueue_insn (op0, gen_move_insn (op0, temp));
8471 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8472 return result;
8473 }
8474 }
8475
8476 /* Preincrement, or we can't increment with one simple insn. */
8477 if (post)
8478 /* Save a copy of the value before inc or dec, to return it later. */
8479 temp = value = copy_to_reg (op0);
8480 else
8481 /* Arrange to return the incremented value. */
8482 /* Copy the rtx because expand_binop will protect from the queue,
8483 and the results of that would be invalid for us to return
8484 if our caller does emit_queue before using our result. */
8485 temp = copy_rtx (value = op0);
8486
8487 /* Increment however we can. */
8488 op1 = expand_binop (mode, this_optab, value, op1,
8489 current_function_check_memory_usage ? NULL_RTX : op0,
8490 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8491 /* Make sure the value is stored into OP0. */
8492 if (op1 != op0)
8493 emit_move_insn (op0, op1);
8494
8495 return temp;
8496 }
8497 \f
8498 /* Expand all function calls contained within EXP, innermost ones first.
8499 But don't look within expressions that have sequence points.
8500 For each CALL_EXPR, record the rtx for its value
8501 in the CALL_EXPR_RTL field. */
8502
8503 static void
8504 preexpand_calls (exp)
8505 tree exp;
8506 {
8507 register int nops, i;
8508 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8509
8510 if (! do_preexpand_calls)
8511 return;
8512
8513 /* Only expressions and references can contain calls. */
8514
8515 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8516 return;
8517
8518 switch (TREE_CODE (exp))
8519 {
8520 case CALL_EXPR:
8521 /* Do nothing if already expanded. */
8522 if (CALL_EXPR_RTL (exp) != 0
8523 /* Do nothing if the call returns a variable-sized object. */
8524 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
8525 /* Do nothing to built-in functions. */
8526 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
8527 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8528 == FUNCTION_DECL)
8529 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8530 return;
8531
8532 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8533 return;
8534
8535 case COMPOUND_EXPR:
8536 case COND_EXPR:
8537 case TRUTH_ANDIF_EXPR:
8538 case TRUTH_ORIF_EXPR:
8539 /* If we find one of these, then we can be sure
8540 the adjust will be done for it (since it makes jumps).
8541 Do it now, so that if this is inside an argument
8542 of a function, we don't get the stack adjustment
8543 after some other args have already been pushed. */
8544 do_pending_stack_adjust ();
8545 return;
8546
8547 case BLOCK:
8548 case RTL_EXPR:
8549 case WITH_CLEANUP_EXPR:
8550 case CLEANUP_POINT_EXPR:
8551 case TRY_CATCH_EXPR:
8552 return;
8553
8554 case SAVE_EXPR:
8555 if (SAVE_EXPR_RTL (exp) != 0)
8556 return;
8557
8558 default:
8559 break;
8560 }
8561
8562 nops = tree_code_length[(int) TREE_CODE (exp)];
8563 for (i = 0; i < nops; i++)
8564 if (TREE_OPERAND (exp, i) != 0)
8565 {
8566 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
8567 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
8568 It doesn't happen before the call is made. */
8569 ;
8570 else
8571 {
8572 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8573 if (type == 'e' || type == '<' || type == '1' || type == '2'
8574 || type == 'r')
8575 preexpand_calls (TREE_OPERAND (exp, i));
8576 }
8577 }
8578 }
8579 \f
8580 /* At the start of a function, record that we have no previously-pushed
8581 arguments waiting to be popped. */
8582
8583 void
8584 init_pending_stack_adjust ()
8585 {
8586 pending_stack_adjust = 0;
8587 }
8588
8589 /* When exiting from function, if safe, clear out any pending stack adjust
8590 so the adjustment won't get done.
8591
8592 Note, if the current function calls alloca, then it must have a
8593 frame pointer regardless of the value of flag_omit_frame_pointer. */
8594
8595 void
8596 clear_pending_stack_adjust ()
8597 {
8598 #ifdef EXIT_IGNORE_STACK
8599 if (optimize > 0
8600 && (! flag_omit_frame_pointer || current_function_calls_alloca)
8601 && EXIT_IGNORE_STACK
8602 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8603 && ! flag_inline_functions)
8604 pending_stack_adjust = 0;
8605 #endif
8606 }
8607
8608 /* Pop any previously-pushed arguments that have not been popped yet. */
8609
8610 void
8611 do_pending_stack_adjust ()
8612 {
8613 if (inhibit_defer_pop == 0)
8614 {
8615 if (pending_stack_adjust != 0)
8616 adjust_stack (GEN_INT (pending_stack_adjust));
8617 pending_stack_adjust = 0;
8618 }
8619 }
8620 \f
8621 /* Expand conditional expressions. */
8622
8623 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8624 LABEL is an rtx of code CODE_LABEL, in this function and all the
8625 functions here. */
8626
8627 void
8628 jumpifnot (exp, label)
8629 tree exp;
8630 rtx label;
8631 {
8632 do_jump (exp, label, NULL_RTX);
8633 }
8634
8635 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8636
8637 void
8638 jumpif (exp, label)
8639 tree exp;
8640 rtx label;
8641 {
8642 do_jump (exp, NULL_RTX, label);
8643 }
8644
8645 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8646 the result is zero, or IF_TRUE_LABEL if the result is one.
8647 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8648 meaning fall through in that case.
8649
8650 do_jump always does any pending stack adjust except when it does not
8651 actually perform a jump. An example where there is no jump
8652 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8653
8654 This function is responsible for optimizing cases such as
8655 &&, || and comparison operators in EXP. */
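/* For example, given a source statement such as

     if (a && b)
       foo ();

   the front end produces a TRUTH_ANDIF_EXPR; the case below first jumps
   to the false label (or a locally created drop-through label) when A is
   zero, and only then evaluates and tests B, so B is not evaluated when
   A is false.  */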
8656
8657 void
8658 do_jump (exp, if_false_label, if_true_label)
8659 tree exp;
8660 rtx if_false_label, if_true_label;
8661 {
8662 register enum tree_code code = TREE_CODE (exp);
8663 /* Some cases need to create a label to jump to
8664 in order to properly fall through.
8665 These cases set DROP_THROUGH_LABEL nonzero. */
8666 rtx drop_through_label = 0;
8667 rtx temp;
8668 int i;
8669 tree type;
8670 enum machine_mode mode;
8671
8672 #ifdef MAX_INTEGER_COMPUTATION_MODE
8673 check_max_integer_computation_mode (exp);
8674 #endif
8675
8676 emit_queue ();
8677
8678 switch (code)
8679 {
8680 case ERROR_MARK:
8681 break;
8682
8683 case INTEGER_CST:
8684 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8685 if (temp)
8686 emit_jump (temp);
8687 break;
8688
8689 #if 0
8690 /* This is not true with #pragma weak */
8691 case ADDR_EXPR:
8692 /* The address of something can never be zero. */
8693 if (if_true_label)
8694 emit_jump (if_true_label);
8695 break;
8696 #endif
8697
8698 case NOP_EXPR:
8699 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8700 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8701 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8702 goto normal;
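      /* Otherwise fall through.  */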
8703 case CONVERT_EXPR:
8704 /* If we are narrowing the operand, we have to do the compare in the
8705 narrower mode. */
8706 if ((TYPE_PRECISION (TREE_TYPE (exp))
8707 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8708 goto normal;
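      /* Otherwise fall through.  */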
8709 case NON_LVALUE_EXPR:
8710 case REFERENCE_EXPR:
8711 case ABS_EXPR:
8712 case NEGATE_EXPR:
8713 case LROTATE_EXPR:
8714 case RROTATE_EXPR:
8715 /* These cannot change zero->non-zero or vice versa. */
8716 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8717 break;
8718
8719 #if 0
8720 /* This is never less insns than evaluating the PLUS_EXPR followed by
8721 a test and can be longer if the test is eliminated. */
8722 case PLUS_EXPR:
8723 /* Reduce to minus. */
8724 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8725 TREE_OPERAND (exp, 0),
8726 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8727 TREE_OPERAND (exp, 1))));
8728 /* Process as MINUS. */
8729 #endif
8730
8731 case MINUS_EXPR:
8732 /* Non-zero iff operands of minus differ. */
8733 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
8734 TREE_OPERAND (exp, 0),
8735 TREE_OPERAND (exp, 1)),
8736 NE, NE, if_false_label, if_true_label);
8737 break;
8738
8739 case BIT_AND_EXPR:
8740 /* If we are AND'ing with a small constant, do this comparison in the
8741 smallest type that fits. If the machine doesn't have comparisons
8742 that small, it will be converted back to the wider comparison.
8743 This helps if we are testing the sign bit of a narrower object.
8744 combine can't do this for us because it can't know whether a
8745 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
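/* For instance (an illustrative sketch): for `if (x & 0x80)' where X is
   an int, the mask fits in 8 bits, so the test is narrowed to a QImode
   (unsigned char) comparison when the target can compare in that mode.  */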
8746
8747 if (! SLOW_BYTE_ACCESS
8748 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8749 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8750 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8751 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8752 && (type = type_for_mode (mode, 1)) != 0
8753 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8754 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8755 != CODE_FOR_nothing))
8756 {
8757 do_jump (convert (type, exp), if_false_label, if_true_label);
8758 break;
8759 }
8760 goto normal;
8761
8762 case TRUTH_NOT_EXPR:
8763 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8764 break;
8765
8766 case TRUTH_ANDIF_EXPR:
8767 if (if_false_label == 0)
8768 if_false_label = drop_through_label = gen_label_rtx ();
8769 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8770 start_cleanup_deferral ();
8771 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8772 end_cleanup_deferral ();
8773 break;
8774
8775 case TRUTH_ORIF_EXPR:
8776 if (if_true_label == 0)
8777 if_true_label = drop_through_label = gen_label_rtx ();
8778 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8779 start_cleanup_deferral ();
8780 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8781 end_cleanup_deferral ();
8782 break;
8783
8784 case COMPOUND_EXPR:
8785 push_temp_slots ();
8786 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8787 preserve_temp_slots (NULL_RTX);
8788 free_temp_slots ();
8789 pop_temp_slots ();
8790 emit_queue ();
8791 do_pending_stack_adjust ();
8792 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8793 break;
8794
8795 case COMPONENT_REF:
8796 case BIT_FIELD_REF:
8797 case ARRAY_REF:
8798 {
8799 int bitsize, bitpos, unsignedp;
8800 enum machine_mode mode;
8801 tree type;
8802 tree offset;
8803 int volatilep = 0;
8804 int alignment;
8805
8806 /* Get description of this reference. We don't actually care
8807 about the underlying object here. */
8808 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8809 &mode, &unsignedp, &volatilep,
8810 &alignment);
8811
8812 type = type_for_size (bitsize, unsignedp);
8813 if (! SLOW_BYTE_ACCESS
8814 && type != 0 && bitsize >= 0
8815 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8816 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8817 != CODE_FOR_nothing))
8818 {
8819 do_jump (convert (type, exp), if_false_label, if_true_label);
8820 break;
8821 }
8822 goto normal;
8823 }
8824
8825 case COND_EXPR:
8826 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8827 if (integer_onep (TREE_OPERAND (exp, 1))
8828 && integer_zerop (TREE_OPERAND (exp, 2)))
8829 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8830
8831 else if (integer_zerop (TREE_OPERAND (exp, 1))
8832 && integer_onep (TREE_OPERAND (exp, 2)))
8833 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8834
8835 else
8836 {
8837 register rtx label1 = gen_label_rtx ();
8838 drop_through_label = gen_label_rtx ();
8839
8840 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8841
8842 start_cleanup_deferral ();
8843 /* Now the THEN-expression. */
8844 do_jump (TREE_OPERAND (exp, 1),
8845 if_false_label ? if_false_label : drop_through_label,
8846 if_true_label ? if_true_label : drop_through_label);
8847 /* In case the do_jump just above never jumps. */
8848 do_pending_stack_adjust ();
8849 emit_label (label1);
8850
8851 /* Now the ELSE-expression. */
8852 do_jump (TREE_OPERAND (exp, 2),
8853 if_false_label ? if_false_label : drop_through_label,
8854 if_true_label ? if_true_label : drop_through_label);
8855 end_cleanup_deferral ();
8856 }
8857 break;
8858
8859 case EQ_EXPR:
8860 {
8861 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8862
8863 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8864 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8865 {
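          /* Decompose the complex equality A == B into
             REALPART (A) == REALPART (B) && IMAGPART (A) == IMAGPART (B),
             wrapping the operands in SAVE_EXPRs so each is evaluated only once.  */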
8866 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8867 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8868 do_jump
8869 (fold
8870 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
8871 fold (build (EQ_EXPR, TREE_TYPE (exp),
8872 fold (build1 (REALPART_EXPR,
8873 TREE_TYPE (inner_type),
8874 exp0)),
8875 fold (build1 (REALPART_EXPR,
8876 TREE_TYPE (inner_type),
8877 exp1)))),
8878 fold (build (EQ_EXPR, TREE_TYPE (exp),
8879 fold (build1 (IMAGPART_EXPR,
8880 TREE_TYPE (inner_type),
8881 exp0)),
8882 fold (build1 (IMAGPART_EXPR,
8883 TREE_TYPE (inner_type),
8884 exp1)))))),
8885 if_false_label, if_true_label);
8886 }
8887
8888 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8889 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8890
8891 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8892 && !can_compare_p (TYPE_MODE (inner_type)))
8893 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8894 else
8895 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
8896 break;
8897 }
8898
8899 case NE_EXPR:
8900 {
8901 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8902
8903 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8904 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8905 {
8906 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8907 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8908 do_jump
8909 (fold
8910 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
8911 fold (build (NE_EXPR, TREE_TYPE (exp),
8912 fold (build1 (REALPART_EXPR,
8913 TREE_TYPE (inner_type),
8914 exp0)),
8915 fold (build1 (REALPART_EXPR,
8916 TREE_TYPE (inner_type),
8917 exp1)))),
8918 fold (build (NE_EXPR, TREE_TYPE (exp),
8919 fold (build1 (IMAGPART_EXPR,
8920 TREE_TYPE (inner_type),
8921 exp0)),
8922 fold (build1 (IMAGPART_EXPR,
8923 TREE_TYPE (inner_type),
8924 exp1)))))),
8925 if_false_label, if_true_label);
8926 }
8927
8928 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8929 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8930
8931 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8932 && !can_compare_p (TYPE_MODE (inner_type)))
8933 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8934 else
8935 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
8936 break;
8937 }
8938
8939 case LT_EXPR:
8940 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8941 == MODE_INT)
8942 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8943 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8944 else
8945 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
8946 break;
8947
8948 case LE_EXPR:
8949 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8950 == MODE_INT)
8951 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8952 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8953 else
8954 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
8955 break;
8956
8957 case GT_EXPR:
8958 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8959 == MODE_INT)
8960 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8961 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8962 else
8963 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
8964 break;
8965
8966 case GE_EXPR:
8967 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8968 == MODE_INT)
8969 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8970 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8971 else
8972 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
8973 break;
8974
8975 default:
8976 normal:
8977 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8978 #if 0
8979 /* This is not needed any more and causes poor code since it causes
8980 comparisons and tests from non-SI objects to have different code
8981 sequences. */
8982 /* Copy to register to avoid generating bad insns by cse
8983 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8984 if (!cse_not_expected && GET_CODE (temp) == MEM)
8985 temp = copy_to_reg (temp);
8986 #endif
8987 do_pending_stack_adjust ();
8988 /* Do any postincrements in the expression that was tested. */
8989 emit_queue ();
8990
8991 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
8992 {
8993 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
8994 if (target)
8995 emit_jump (target);
8996 }
8997 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8998 && ! can_compare_p (GET_MODE (temp)))
8999 /* Note swapping the labels gives us not-equal. */
9000 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9001 else if (GET_MODE (temp) != VOIDmode)
9002 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9003 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9004 GET_MODE (temp), NULL_RTX, 0,
9005 if_false_label, if_true_label);
9006 else
9007 abort ();
9008 }
9009
9010 if (drop_through_label)
9011 {
9012 /* If do_jump produces code that might be jumped around,
9013 do any stack adjusts from that code, before the place
9014 where control merges in. */
9015 do_pending_stack_adjust ();
9016 emit_label (drop_through_label);
9017 }
9018 }
9019 \f
9020 /* Given a comparison expression EXP for values too wide to be compared
9021 with one insn, test the comparison and jump to the appropriate label.
9022 The code of EXP is ignored; we always test GT if SWAP is 0,
9023 and LT if SWAP is 1. */
9024
9025 static void
9026 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9027 tree exp;
9028 int swap;
9029 rtx if_false_label, if_true_label;
9030 {
9031 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9032 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9033 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9034 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9035
9036 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9037 }
9038
9039 /* Compare OP0 with OP1, word at a time, in mode MODE.
9040 UNSIGNEDP says to do unsigned comparison.
9041 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
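/* As an illustrative sketch, assuming 32-bit words: comparing two DImode
   values examines the high-order SImode words first; if they differ the
   outcome is decided and we jump, otherwise the low-order words are
   considered, and those are always compared unsigned since only the
   high-order word carries the sign.  */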
9042
9043 void
9044 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9045 enum machine_mode mode;
9046 int unsignedp;
9047 rtx op0, op1;
9048 rtx if_false_label, if_true_label;
9049 {
9050 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9051 rtx drop_through_label = 0;
9052 int i;
9053
9054 if (! if_true_label || ! if_false_label)
9055 drop_through_label = gen_label_rtx ();
9056 if (! if_true_label)
9057 if_true_label = drop_through_label;
9058 if (! if_false_label)
9059 if_false_label = drop_through_label;
9060
9061 /* Compare a word at a time, high order first. */
9062 for (i = 0; i < nwords; i++)
9063 {
9064 rtx op0_word, op1_word;
9065
9066 if (WORDS_BIG_ENDIAN)
9067 {
9068 op0_word = operand_subword_force (op0, i, mode);
9069 op1_word = operand_subword_force (op1, i, mode);
9070 }
9071 else
9072 {
9073 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9074 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9075 }
9076
9077 /* All but high-order word must be compared as unsigned. */
9078 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9079 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9080 NULL_RTX, if_true_label);
9081
9082 /* Consider lower words only if these are equal. */
9083 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9084 NULL_RTX, 0, NULL_RTX, if_false_label);
9085 }
9086
9087 if (if_false_label)
9088 emit_jump (if_false_label);
9089 if (drop_through_label)
9090 emit_label (drop_through_label);
9091 }
9092
9093 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9094 with one insn, test the comparison and jump to the appropriate label. */
9095
9096 static void
9097 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9098 tree exp;
9099 rtx if_false_label, if_true_label;
9100 {
9101 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9102 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9103 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9104 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9105 int i;
9106 rtx drop_through_label = 0;
9107
9108 if (! if_false_label)
9109 drop_through_label = if_false_label = gen_label_rtx ();
9110
9111 for (i = 0; i < nwords; i++)
9112 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9113 operand_subword_force (op1, i, mode),
9114 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9115 word_mode, NULL_RTX, 0, if_false_label,
9116 NULL_RTX);
9117
9118 if (if_true_label)
9119 emit_jump (if_true_label);
9120 if (drop_through_label)
9121 emit_label (drop_through_label);
9122 }
9123 \f
9124 /* Jump to IF_TRUE_LABEL if OP0 is zero, to IF_FALSE_LABEL otherwise
9125 (either label may be zero, meaning fall through).  We assume that OP0
9126 has an integer mode that is too wide for the available compare insns. */

9127
9128 void
9129 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9130 rtx op0;
9131 rtx if_false_label, if_true_label;
9132 {
9133 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9134 rtx part;
9135 int i;
9136 rtx drop_through_label = 0;
9137
9138 /* The fastest way of doing this comparison on almost any machine is to
9139 "or" all the words and compare the result. If all have to be loaded
9140 from memory and this is a very wide item, it's possible this may
9141 be slower, but that's highly unlikely. */
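/* For example, with 32-bit words a DImode value is tested by IOR-ing its
   two SImode halves into one register and comparing that single result
   against zero, rather than branching on each word separately.  */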
9142
9143 part = gen_reg_rtx (word_mode);
9144 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9145 for (i = 1; i < nwords && part != 0; i++)
9146 part = expand_binop (word_mode, ior_optab, part,
9147 operand_subword_force (op0, i, GET_MODE (op0)),
9148 part, 1, OPTAB_WIDEN);
9149
9150 if (part != 0)
9151 {
9152 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9153 NULL_RTX, 0, if_false_label, if_true_label);
9154
9155 return;
9156 }
9157
9158 /* If we couldn't do the "or" simply, do this with a series of compares. */
9159 if (! if_false_label)
9160 drop_through_label = if_false_label = gen_label_rtx ();
9161
9162 for (i = 0; i < nwords; i++)
9163 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9164 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9165 if_false_label, NULL_RTX);
9166
9167 if (if_true_label)
9168 emit_jump (if_true_label);
9169
9170 if (drop_through_label)
9171 emit_label (drop_through_label);
9172 }
9173 \f
9174 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9175 (including code to compute the values to be compared)
9176 and set (CC0) according to the result.
9177 The decision as to signed or unsigned comparison must be made by the caller.
9178
9179 We force a stack adjustment unless there are currently
9180 things pushed on the stack that aren't yet used.
9181
9182 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9183 compared.
9184
9185 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9186 size of MODE should be used. */
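/* As an illustrative sketch, a call such as

     compare_from_rtx (x, y, LT, 0, SImode, NULL_RTX, 0)

   emits the compare of X against Y and returns the rtx
   (lt (cc0) (const_int 0)) for use in a conditional jump; if both
   operands are constant the comparison is folded and a constant rtx is
   returned instead.  */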
9187
9188 rtx
9189 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9190 register rtx op0, op1;
9191 enum rtx_code code;
9192 int unsignedp;
9193 enum machine_mode mode;
9194 rtx size;
9195 int align;
9196 {
9197 rtx tem;
9198
9199 /* If one operand is constant, make it the second one. Only do this
9200 if the other operand is not constant as well. */
9201
9202 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9203 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9204 {
9205 tem = op0;
9206 op0 = op1;
9207 op1 = tem;
9208 code = swap_condition (code);
9209 }
9210
9211 if (flag_force_mem)
9212 {
9213 op0 = force_not_mem (op0);
9214 op1 = force_not_mem (op1);
9215 }
9216
9217 do_pending_stack_adjust ();
9218
9219 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9220 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9221 return tem;
9222
9223 #if 0
9224 /* There's no need to do this now that combine.c can eliminate lots of
9225 sign extensions. This can be less efficient in certain cases on other
9226 machines. */
9227
9228 /* If this is a signed equality comparison, we can do it as an
9229 unsigned comparison since zero-extension is cheaper than sign
9230 extension and comparisons with zero are done as unsigned. This is
9231 the case even on machines that can do fast sign extension, since
9232 zero-extension is easier to combine with other operations than
9233 sign-extension is. If we are comparing against a constant, we must
9234 convert it to what it would look like unsigned. */
9235 if ((code == EQ || code == NE) && ! unsignedp
9236 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9237 {
9238 if (GET_CODE (op1) == CONST_INT
9239 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9240 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9241 unsignedp = 1;
9242 }
9243 #endif
9244
9245 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9246
9247 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9248 }
9249
9250 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9251 The decision as to signed or unsigned comparison must be made by the caller.
9252
9253 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9254 compared.
9255
9256 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9257 size of MODE should be used. */
9258
9259 void
9260 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9261 if_false_label, if_true_label)
9262 register rtx op0, op1;
9263 enum rtx_code code;
9264 int unsignedp;
9265 enum machine_mode mode;
9266 rtx size;
9267 int align;
9268 rtx if_false_label, if_true_label;
9269 {
9270 rtx tem;
9271 int dummy_true_label = 0;
9272
9273 /* Reverse the comparison if that is safe and we want to jump if it is
9274 false.  This is not done for floating-point modes: with a NaN operand,
a comparison and its reverse can both be false. */
9275 if (! if_true_label && ! FLOAT_MODE_P (mode))
9276 {
9277 if_true_label = if_false_label;
9278 if_false_label = 0;
9279 code = reverse_condition (code);
9280 }
9281
9282 /* If one operand is constant, make it the second one. Only do this
9283 if the other operand is not constant as well. */
9284
9285 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9286 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9287 {
9288 tem = op0;
9289 op0 = op1;
9290 op1 = tem;
9291 code = swap_condition (code);
9292 }
9293
9294 if (flag_force_mem)
9295 {
9296 op0 = force_not_mem (op0);
9297 op1 = force_not_mem (op1);
9298 }
9299
9300 do_pending_stack_adjust ();
9301
9302 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9303 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9304 {
9305 if (tem == const_true_rtx)
9306 {
9307 if (if_true_label)
9308 emit_jump (if_true_label);
9309 }
9310 else
9311 {
9312 if (if_false_label)
9313 emit_jump (if_false_label);
9314 }
9315 return;
9316 }
9317
9318 #if 0
9319 /* There's no need to do this now that combine.c can eliminate lots of
9320 sign extensions. This can be less efficient in certain cases on other
9321 machines. */
9322
9323 /* If this is a signed equality comparison, we can do it as an
9324 unsigned comparison since zero-extension is cheaper than sign
9325 extension and comparisons with zero are done as unsigned. This is
9326 the case even on machines that can do fast sign extension, since
9327 zero-extension is easier to combine with other operations than
9328 sign-extension is. If we are comparing against a constant, we must
9329 convert it to what it would look like unsigned. */
9330 if ((code == EQ || code == NE) && ! unsignedp
9331 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9332 {
9333 if (GET_CODE (op1) == CONST_INT
9334 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9335 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9336 unsignedp = 1;
9337 }
9338 #endif
9339
9340 if (! if_true_label)
9341 {
9342 dummy_true_label = 1;
9343 if_true_label = gen_label_rtx ();
9344 }
9345
9346 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9347 if_true_label);
9348
9349 if (if_false_label)
9350 emit_jump (if_false_label);
9351 if (dummy_true_label)
9352 emit_label (if_true_label);
9353 }
9354
9355 /* Generate code for a comparison expression EXP (including code to compute
9356 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9357 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9358 generated code will drop through.
9359 SIGNED_CODE should be the rtx operation for this comparison for
9360 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9361
9362 We force a stack adjustment unless there are currently
9363 things pushed on the stack that aren't yet used. */
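/* For example, do_jump expands `a < b' by calling this routine with
   SIGNED_CODE == LT and UNSIGNED_CODE == LTU; the choice between the two
   rtx codes is made below from the signedness of the operands' type.  */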
9364
9365 static void
9366 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9367 if_true_label)
9368 register tree exp;
9369 enum rtx_code signed_code, unsigned_code;
9370 rtx if_false_label, if_true_label;
9371 {
9372 register rtx op0, op1;
9373 register tree type;
9374 register enum machine_mode mode;
9375 int unsignedp;
9376 enum rtx_code code;
9377
9378 /* Don't crash if the comparison was erroneous. */
9379 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9380 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9381 return;
9382
9383 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9384 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9385 mode = TYPE_MODE (type);
9386 unsignedp = TREE_UNSIGNED (type);
9387 code = unsignedp ? unsigned_code : signed_code;
9388
9389 #ifdef HAVE_canonicalize_funcptr_for_compare
9390 /* If function pointers need to be "canonicalized" before they can
9391 be reliably compared, then canonicalize them. */
9392 if (HAVE_canonicalize_funcptr_for_compare
9393 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9394 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9395 == FUNCTION_TYPE))
9396 {
9397 rtx new_op0 = gen_reg_rtx (mode);
9398
9399 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9400 op0 = new_op0;
9401 }
9402
9403 if (HAVE_canonicalize_funcptr_for_compare
9404 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9405 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9406 == FUNCTION_TYPE))
9407 {
9408 rtx new_op1 = gen_reg_rtx (mode);
9409
9410 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9411 op1 = new_op1;
9412 }
9413 #endif
9414
9415 /* Do any postincrements in the expression that was tested. */
9416 emit_queue ();
9417
9418 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9419 ((mode == BLKmode)
9420 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9421 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT,
9422 if_false_label, if_true_label);
9423 }
9424 \f
9425 /* Generate code to calculate EXP using a store-flag instruction
9426 and return an rtx for the result. EXP is either a comparison
9427 or a TRUTH_NOT_EXPR whose operand is a comparison.
9428
9429 If TARGET is nonzero, store the result there if convenient.
9430
9431 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9432 cheap.
9433
9434 Return zero if there is no suitable set-flag instruction
9435 available on this machine.
9436
9437 Once expand_expr has been called on the arguments of the comparison,
9438 we are committed to doing the store flag, since it is not safe to
9439 re-evaluate the expression. We emit the store-flag insn by calling
9440 emit_store_flag, but only expand the arguments if we have a reason
9441 to believe that emit_store_flag will be successful. If we think that
9442 it will, but it isn't, we have to simulate the store-flag with a
9443 set/jump/set sequence. */
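/* As an illustrative sketch: for an assignment such as

     flag = (x > y);

   this routine tries to compute the 0/1 result with a single store-flag
   (scc) instruction via emit_store_flag, and falls back to the explicit
   move/compare/jump/move sequence at the end of the function only when
   that fails.  */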
9444
9445 static rtx
9446 do_store_flag (exp, target, mode, only_cheap)
9447 tree exp;
9448 rtx target;
9449 enum machine_mode mode;
9450 int only_cheap;
9451 {
9452 enum rtx_code code;
9453 tree arg0, arg1, type;
9454 tree tem;
9455 enum machine_mode operand_mode;
9456 int invert = 0;
9457 int unsignedp;
9458 rtx op0, op1;
9459 enum insn_code icode;
9460 rtx subtarget = target;
9461 rtx result, label;
9462
9463 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9464 result at the end. We can't simply invert the test since it would
9465 have already been inverted if it were valid. This case occurs for
9466 some floating-point comparisons. */
9467
9468 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9469 invert = 1, exp = TREE_OPERAND (exp, 0);
9470
9471 arg0 = TREE_OPERAND (exp, 0);
9472 arg1 = TREE_OPERAND (exp, 1);
9473 type = TREE_TYPE (arg0);
9474 operand_mode = TYPE_MODE (type);
9475 unsignedp = TREE_UNSIGNED (type);
9476
9477 /* We won't bother with BLKmode store-flag operations because it would mean
9478 passing a lot of information to emit_store_flag. */
9479 if (operand_mode == BLKmode)
9480 return 0;
9481
9482 /* We won't bother with store-flag operations involving function pointers
9483 when function pointers must be canonicalized before comparisons. */
9484 #ifdef HAVE_canonicalize_funcptr_for_compare
9485 if (HAVE_canonicalize_funcptr_for_compare
9486 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9487 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9488 == FUNCTION_TYPE))
9489 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9490 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9491 == FUNCTION_TYPE))))
9492 return 0;
9493 #endif
9494
9495 STRIP_NOPS (arg0);
9496 STRIP_NOPS (arg1);
9497
9498 /* Get the rtx comparison code to use. We know that EXP is a comparison
9499 operation of some type. Some comparisons against 1 and -1 can be
9500 converted to comparisons with zero. Do so here so that the tests
9501 below will be aware that we have a comparison with zero. These
9502 tests will not catch constants in the first operand, but constants
9503 are rarely passed as the first operand. */
9504
9505 switch (TREE_CODE (exp))
9506 {
9507 case EQ_EXPR:
9508 code = EQ;
9509 break;
9510 case NE_EXPR:
9511 code = NE;
9512 break;
9513 case LT_EXPR:
9514 if (integer_onep (arg1))
9515 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9516 else
9517 code = unsignedp ? LTU : LT;
9518 break;
9519 case LE_EXPR:
9520 if (! unsignedp && integer_all_onesp (arg1))
9521 arg1 = integer_zero_node, code = LT;
9522 else
9523 code = unsignedp ? LEU : LE;
9524 break;
9525 case GT_EXPR:
9526 if (! unsignedp && integer_all_onesp (arg1))
9527 arg1 = integer_zero_node, code = GE;
9528 else
9529 code = unsignedp ? GTU : GT;
9530 break;
9531 case GE_EXPR:
9532 if (integer_onep (arg1))
9533 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9534 else
9535 code = unsignedp ? GEU : GE;
9536 break;
9537 default:
9538 abort ();
9539 }
9540
9541 /* Put a constant second. */
9542 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9543 {
9544 tem = arg0; arg0 = arg1; arg1 = tem;
9545 code = swap_condition (code);
9546 }
9547
9548 /* If this is an equality or inequality test of a single bit, we can
9549 do this by shifting the bit being tested to the low-order bit and
9550 masking the result with the constant 1. If the condition was EQ,
9551 we xor it with 1. This does not require an scc insn and is faster
9552 than an scc insn even if we have it. */
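/* For example, `(x & 4) != 0' becomes `(x >> 2) & 1'; for the EQ form the
   shifted value is also XORed with 1 so the result is inverted.  */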
9553
9554 if ((code == NE || code == EQ)
9555 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9556 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9557 {
9558 tree inner = TREE_OPERAND (arg0, 0);
9559 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9560 int ops_unsignedp;
9561
9562 /* If INNER is a right shift of a constant and it plus BITNUM does
9563 not overflow, adjust BITNUM and INNER. */
9564
9565 if (TREE_CODE (inner) == RSHIFT_EXPR
9566 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9567 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9568 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9569 < TYPE_PRECISION (type)))
9570 {
9571 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9572 inner = TREE_OPERAND (inner, 0);
9573 }
9574
9575 /* If we are going to be able to omit the AND below, we must do our
9576 operations as unsigned. If we must use the AND, we have a choice.
9577 Normally unsigned is faster, but for some machines signed is. */
9578 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9579 #ifdef LOAD_EXTEND_OP
9580 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9581 #else
9582 : 1
9583 #endif
9584 );
9585
9586 if (subtarget == 0 || GET_CODE (subtarget) != REG
9587 || GET_MODE (subtarget) != operand_mode
9588 || ! safe_from_p (subtarget, inner, 1))
9589 subtarget = 0;
9590
9591 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9592
9593 if (bitnum != 0)
9594 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9595 size_int (bitnum), subtarget, ops_unsignedp);
9596
9597 if (GET_MODE (op0) != mode)
9598 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9599
9600 if ((code == EQ && ! invert) || (code == NE && invert))
9601 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9602 ops_unsignedp, OPTAB_LIB_WIDEN);
9603
9604 /* Put the AND last so it can combine with more things. */
9605 if (bitnum != TYPE_PRECISION (type) - 1)
9606 op0 = expand_and (op0, const1_rtx, subtarget);
9607
9608 return op0;
9609 }
9610
9611 /* Now see if we are likely to be able to do this. Return if not. */
9612 if (! can_compare_p (operand_mode))
9613 return 0;
9614 icode = setcc_gen_code[(int) code];
9615 if (icode == CODE_FOR_nothing
9616 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9617 {
9618 /* We can only do this if it is one of the special cases that
9619 can be handled without an scc insn. */
9620 if ((code == LT && integer_zerop (arg1))
9621 || (! only_cheap && code == GE && integer_zerop (arg1)))
9622 ;
9623 else if (BRANCH_COST >= 0
9624 && ! only_cheap && (code == NE || code == EQ)
9625 && TREE_CODE (type) != REAL_TYPE
9626 && ((abs_optab->handlers[(int) operand_mode].insn_code
9627 != CODE_FOR_nothing)
9628 || (ffs_optab->handlers[(int) operand_mode].insn_code
9629 != CODE_FOR_nothing)))
9630 ;
9631 else
9632 return 0;
9633 }
9634
9635 preexpand_calls (exp);
9636 if (subtarget == 0 || GET_CODE (subtarget) != REG
9637 || GET_MODE (subtarget) != operand_mode
9638 || ! safe_from_p (subtarget, arg1, 1))
9639 subtarget = 0;
9640
9641 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9642 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9643
9644 if (target == 0)
9645 target = gen_reg_rtx (mode);
9646
9647 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9648 because, if the emit_store_flag does anything it will succeed and
9649 OP0 and OP1 will not be used subsequently. */
9650
9651 result = emit_store_flag (target, code,
9652 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9653 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9654 operand_mode, unsignedp, 1);
9655
9656 if (result)
9657 {
9658 if (invert)
9659 result = expand_binop (mode, xor_optab, result, const1_rtx,
9660 result, 0, OPTAB_LIB_WIDEN);
9661 return result;
9662 }
9663
9664 /* If this failed, we have to do this with set/compare/jump/set code. */
9665 if (GET_CODE (target) != REG
9666 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9667 target = gen_reg_rtx (GET_MODE (target));
9668
9669 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9670 result = compare_from_rtx (op0, op1, code, unsignedp,
9671 operand_mode, NULL_RTX, 0);
9672 if (GET_CODE (result) == CONST_INT)
9673 return (((result == const0_rtx && ! invert)
9674 || (result != const0_rtx && invert))
9675 ? const0_rtx : const1_rtx);
9676
9677 label = gen_label_rtx ();
9678 if (bcc_gen_fctn[(int) code] == 0)
9679 abort ();
9680
9681 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9682 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9683 emit_label (label);
9684
9685 return target;
9686 }
9687 \f
9688 /* Generate a tablejump instruction (used for switch statements). */
9689
9690 #ifdef HAVE_tablejump
9691
9692 /* INDEX is the value being switched on, with the lowest value
9693 in the table already subtracted.
9694 MODE is its expected mode (needed if INDEX is constant).
9695 RANGE is the largest valid value of INDEX (the number of table entries minus one).
9696 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9697
9698 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9699 index value is out of range. */
9700
9701 void
9702 do_tablejump (index, mode, range, table_label, default_label)
9703 rtx index, range, table_label, default_label;
9704 enum machine_mode mode;
9705 {
9706 register rtx temp, vector;
9707
9708 /* Do an unsigned comparison (in the proper mode) between the index
9709 expression and the value which represents the length of the range.
9710 Since we just finished subtracting the lower bound of the range
9711 from the index expression, this comparison allows us to simultaneously
9712 check that the original index expression value is both greater than
9713 or equal to the minimum value of the range and less than or equal to
9714 the maximum value of the range. */
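/* As an illustrative sketch: for a switch whose cases run from 10 to 17,
   the caller passes INDEX = value - 10 and RANGE = 7; the single unsigned
   comparison below sends both value < 10 (which wraps around to a huge
   unsigned number) and value > 17 to DEFAULT_LABEL.  */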
9715
9716 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9717 0, default_label);
9718
9719 /* If index is in range, it must fit in Pmode.
9720 Convert to Pmode so we can index with it. */
9721 if (mode != Pmode)
9722 index = convert_to_mode (Pmode, index, 1);
9723
9724 /* Don't let a MEM slip thru, because then INDEX that comes
9725 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9726 and break_out_memory_refs will go to work on it and mess it up. */
9727 #ifdef PIC_CASE_VECTOR_ADDRESS
9728 if (flag_pic && GET_CODE (index) != REG)
9729 index = copy_to_mode_reg (Pmode, index);
9730 #endif
9731
9732 /* If flag_force_addr were to affect this address
9733 it could interfere with the tricky assumptions made
9734 about addresses that contain label-refs,
9735 which may be valid only very near the tablejump itself. */
9736 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9737 GET_MODE_SIZE, because this indicates how large insns are. The other
9738 uses should all be Pmode, because they are addresses. This code
9739 could fail if addresses and insns are not the same size. */
9740 index = gen_rtx_PLUS (Pmode,
9741 gen_rtx_MULT (Pmode, index,
9742 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9743 gen_rtx_LABEL_REF (Pmode, table_label));
9744 #ifdef PIC_CASE_VECTOR_ADDRESS
9745 if (flag_pic)
9746 index = PIC_CASE_VECTOR_ADDRESS (index);
9747 else
9748 #endif
9749 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9750 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9751 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9752 RTX_UNCHANGING_P (vector) = 1;
9753 convert_move (temp, vector, 0);
9754
9755 emit_jump_insn (gen_tablejump (temp, table_label));
9756
9757 /* If we are generating PIC code or if the table is PC-relative, the
9758 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9759 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9760 emit_barrier ();
9761 }
9762
9763 #endif /* HAVE_tablejump */