expr.c (store_constructor): Don't call clear_storage if size is variable.
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "defaults.h"
42 #include "toplev.h"
43 #include "ggc.h"
44 #include "tm_p.h"
45
46 #define CEIL(x,y) (((x) + (y) - 1) / (y))
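/* For instance, CEIL (10, 4) == 3: a ten-byte value needs three four-byte
   words, which is how CEIL (GET_MODE_SIZE (mode), UNITS_PER_WORD) is used
   below to count the words of a multiword value.  */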
47
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
50
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
53
54 #ifdef PUSH_ROUNDING
55
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first */
58 #endif
59
60 #endif
61
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
65 #else
66 #define STACK_PUSH_CODE PRE_INC
67 #endif
68 #endif
69
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
73 #endif
74
75 /* If this is nonzero, we do not bother generating VOLATILE
76 around volatile memory references, and we are willing to
77 output indirect addresses. If cse is to follow, we reject
78 indirect addresses so a useful potential cse is generated;
79 if it is used only once, instruction combination will produce
80 the same indirect address eventually. */
81 int cse_not_expected;
82
83 /* Nonzero to generate code for all the subroutines within an
84 expression before generating the upper levels of the expression.
85 Nowadays this is never zero. */
86 int do_preexpand_calls = 1;
87
88 /* Don't check memory usage, since code is being emitted to check memory
89 usage. Used when current_function_check_memory_usage is true, to avoid
90 infinite recursion. */
91 static int in_check_memory_usage;
92
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
94 static tree placeholder_list = 0;
95
96 /* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98 struct move_by_pieces
99 {
100 rtx to;
101 rtx to_addr;
102 int autinc_to;
103 int explicit_inc_to;
104 int to_struct;
105 int to_readonly;
106 rtx from;
107 rtx from_addr;
108 int autinc_from;
109 int explicit_inc_from;
110 int from_struct;
111 int from_readonly;
112 int len;
113 int offset;
114 int reverse;
115 };
116
117 /* This structure is used by clear_by_pieces to describe the clear to
118 be performed. */
119
120 struct clear_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 int to_struct;
127 int len;
128 int offset;
129 int reverse;
130 };
131
132 extern struct obstack permanent_obstack;
133
134 static rtx get_push_address PROTO ((int));
135
136 static rtx enqueue_insn PROTO((rtx, rtx));
137 static int move_by_pieces_ninsns PROTO((unsigned int, int));
138 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static void clear_by_pieces PROTO((rtx, int, int));
141 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...),
142 enum machine_mode,
143 struct clear_by_pieces *));
144 static int is_zeros_p PROTO((tree));
145 static int mostly_zeros_p PROTO((tree));
146 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
147 tree, tree, int, int));
148 static void store_constructor PROTO((tree, rtx, int, int, int));
149 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
150 enum machine_mode, int, int,
151 int, int));
152 static enum memory_use_mode
153 get_memory_usage_from_modifier PROTO((enum expand_modifier));
154 static tree save_noncopied_parts PROTO((tree, tree));
155 static tree init_noncopied_parts PROTO((tree, tree));
156 static int safe_from_p PROTO((rtx, tree, int));
157 static int fixed_type_p PROTO((tree));
158 static rtx var_rtx PROTO((tree));
159 static int readonly_fields_p PROTO((tree));
160 static rtx expand_expr_unaligned PROTO((tree, int *));
161 static rtx expand_increment PROTO((tree, int, int));
162 static void preexpand_calls PROTO((tree));
163 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
164 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
165 static void do_compare_and_jump PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
166 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
167
168 /* Record for each mode whether we can move a register directly to or
169 from an object of that mode in memory. If we can't, we won't try
170 to use that mode directly when accessing a field of that mode. */
171
172 static char direct_load[NUM_MACHINE_MODES];
173 static char direct_store[NUM_MACHINE_MODES];
174
175 /* If a memory-to-memory move would take MOVE_RATIO or more simple
176 move-instruction sequences, we will do a movstr or libcall instead. */
177
178 #ifndef MOVE_RATIO
179 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
180 #define MOVE_RATIO 2
181 #else
182 /* If we are optimizing for space (-Os), cut down the default move ratio */
183 #define MOVE_RATIO (optimize_size ? 3 : 15)
184 #endif
185 #endif
186
187 /* This macro is used to determine whether move_by_pieces should be called
188 to perform a structure copy. */
189 #ifndef MOVE_BY_PIECES_P
190 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
191 (SIZE, ALIGN) < MOVE_RATIO)
192 #endif
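/* A rough illustration: when no movstrM patterns are defined and we are not
   optimizing for size, MOVE_RATIO defaults to 15, so a 16-byte word-aligned
   copy (four word moves on a typical 32-bit target) satisfies
   MOVE_BY_PIECES_P and is expanded inline by move_by_pieces; targets that do
   provide movstr patterns use a ratio of 2 and hand most block copies to
   those patterns or to a library call in emit_block_move below.  */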
193
194 /* This array records the insn_code of insns to perform block moves. */
195 enum insn_code movstr_optab[NUM_MACHINE_MODES];
196
197 /* This array records the insn_code of insns to perform block clears. */
198 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
199
200 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
201
202 #ifndef SLOW_UNALIGNED_ACCESS
203 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
204 #endif
205 \f
206 /* This is run once per compilation to set up which modes can be used
207 directly in memory and to initialize the block move optab. */
208
209 void
210 init_expr_once ()
211 {
212 rtx insn, pat;
213 enum machine_mode mode;
214 int num_clobbers;
215 rtx mem, mem1;
216 char *free_point;
217
218 start_sequence ();
219
220 /* Since we are on the permanent obstack, we must be sure we save this
221 spot AFTER we call start_sequence, since it will reuse the rtl it
222 makes. */
223 free_point = (char *) oballoc (0);
224
225 /* Try indexing by frame ptr and try by stack ptr.
226 It is known that on the Convex the stack ptr isn't a valid index.
227 With luck, one or the other is valid on any machine. */
228 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
229 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
230
231 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
232 pat = PATTERN (insn);
233
234 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
235 mode = (enum machine_mode) ((int) mode + 1))
236 {
237 int regno;
238 rtx reg;
239
240 direct_load[(int) mode] = direct_store[(int) mode] = 0;
241 PUT_MODE (mem, mode);
242 PUT_MODE (mem1, mode);
243
244 /* See if there is some register that can be used in this mode and
245 directly loaded or stored from memory. */
246
247 if (mode != VOIDmode && mode != BLKmode)
248 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
249 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
250 regno++)
251 {
252 if (! HARD_REGNO_MODE_OK (regno, mode))
253 continue;
254
255 reg = gen_rtx_REG (mode, regno);
256
257 SET_SRC (pat) = mem;
258 SET_DEST (pat) = reg;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_load[(int) mode] = 1;
261
262 SET_SRC (pat) = mem1;
263 SET_DEST (pat) = reg;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_load[(int) mode] = 1;
266
267 SET_SRC (pat) = reg;
268 SET_DEST (pat) = mem;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_store[(int) mode] = 1;
271
272 SET_SRC (pat) = reg;
273 SET_DEST (pat) = mem1;
274 if (recog (pat, insn, &num_clobbers) >= 0)
275 direct_store[(int) mode] = 1;
276 }
277 }
278
279 end_sequence ();
280 obfree (free_point);
281 }
282
283 /* This is run at the start of compiling a function. */
284
285 void
286 init_expr ()
287 {
288 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
289
290 pending_chain = 0;
291 pending_stack_adjust = 0;
292 inhibit_defer_pop = 0;
293 saveregs_value = 0;
294 apply_args_value = 0;
295 forced_labels = 0;
296 }
297
298 void
299 mark_expr_status (p)
300 struct expr_status *p;
301 {
302 if (p == NULL)
303 return;
304
305 ggc_mark_rtx (p->x_saveregs_value);
306 ggc_mark_rtx (p->x_apply_args_value);
307 ggc_mark_rtx (p->x_forced_labels);
308 }
309
310 void
311 free_expr_status (f)
312 struct function *f;
313 {
314 free (f->expr);
315 f->expr = NULL;
316 }
317
318 /* Small sanity check that the queue is empty at the end of a function. */
319 void
320 finish_expr_for_function ()
321 {
322 if (pending_chain)
323 abort ();
324 }
325 \f
326 /* Manage the queue of increment instructions to be output
327 for POSTINCREMENT_EXPR expressions, etc. */
328
329 /* Queue up to increment (or change) VAR later. BODY says how:
330 BODY should be the same thing you would pass to emit_insn
331 to increment right away. It will go to emit_insn later on.
332
333 The value is a QUEUED expression to be used in place of VAR
334 where you want to guarantee the pre-incrementation value of VAR. */
335
336 static rtx
337 enqueue_insn (var, body)
338 rtx var, body;
339 {
340 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
341 body, pending_chain);
342 return pending_chain;
343 }
344
345 /* Use protect_from_queue to convert a QUEUED expression
346 into something that you can put immediately into an instruction.
347 If the queued incrementation has not happened yet,
348 protect_from_queue returns the variable itself.
349 If the incrementation has happened, protect_from_queue returns a temp
350 that contains a copy of the old value of the variable.
351
352 Any time an rtx which might possibly be a QUEUED is to be put
353 into an instruction, it must be passed through protect_from_queue first.
354 QUEUED expressions are not meaningful in instructions.
355
356 Do not pass a value through protect_from_queue and then hold
357 on to it for a while before putting it in an instruction!
358 If the queue is flushed in between, incorrect code will result. */
359
360 rtx
361 protect_from_queue (x, modify)
362 register rtx x;
363 int modify;
364 {
365 register RTX_CODE code = GET_CODE (x);
366
367 #if 0 /* A QUEUED can hang around after the queue is forced out. */
368 /* Shortcut for most common case. */
369 if (pending_chain == 0)
370 return x;
371 #endif
372
373 if (code != QUEUED)
374 {
375 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
376 use of autoincrement. Make a copy of the contents of the memory
377 location rather than a copy of the address, but not if the value is
378 of mode BLKmode. Don't modify X in place since it might be
379 shared. */
380 if (code == MEM && GET_MODE (x) != BLKmode
381 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
382 {
383 register rtx y = XEXP (x, 0);
384 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
385
386 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
387 MEM_COPY_ATTRIBUTES (new, x);
388 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
389
390 if (QUEUED_INSN (y))
391 {
392 register rtx temp = gen_reg_rtx (GET_MODE (new));
393 emit_insn_before (gen_move_insn (temp, new),
394 QUEUED_INSN (y));
395 return temp;
396 }
397 return new;
398 }
399 /* Otherwise, recursively protect the subexpressions of all
400 the kinds of rtx's that can contain a QUEUED. */
401 if (code == MEM)
402 {
403 rtx tem = protect_from_queue (XEXP (x, 0), 0);
404 if (tem != XEXP (x, 0))
405 {
406 x = copy_rtx (x);
407 XEXP (x, 0) = tem;
408 }
409 }
410 else if (code == PLUS || code == MULT)
411 {
412 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
413 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
414 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
415 {
416 x = copy_rtx (x);
417 XEXP (x, 0) = new0;
418 XEXP (x, 1) = new1;
419 }
420 }
421 return x;
422 }
423 /* If the increment has not happened, use the variable itself. */
424 if (QUEUED_INSN (x) == 0)
425 return QUEUED_VAR (x);
426 /* If the increment has happened and a pre-increment copy exists,
427 use that copy. */
428 if (QUEUED_COPY (x) != 0)
429 return QUEUED_COPY (x);
430 /* The increment has happened but we haven't set up a pre-increment copy.
431 Set one up now, and use it. */
432 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
433 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
434 QUEUED_INSN (x));
435 return QUEUED_COPY (x);
436 }
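/* A usage sketch, mirroring what convert_move does below: operands that
   might contain a QUEUED are filtered before being placed in insns,

	to = protect_from_queue (to, 1);
	from = protect_from_queue (from, 0);

   and the results are used immediately, since holding them across an
   emit_queue would yield incorrect code.  */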
437
438 /* Return nonzero if X contains a QUEUED expression:
439 if it contains anything that will be altered by a queued increment.
440 We handle only combinations of MEM, PLUS, MINUS and MULT operators
441 since memory addresses generally contain only those. */
442
443 int
444 queued_subexp_p (x)
445 rtx x;
446 {
447 register enum rtx_code code = GET_CODE (x);
448 switch (code)
449 {
450 case QUEUED:
451 return 1;
452 case MEM:
453 return queued_subexp_p (XEXP (x, 0));
454 case MULT:
455 case PLUS:
456 case MINUS:
457 return (queued_subexp_p (XEXP (x, 0))
458 || queued_subexp_p (XEXP (x, 1)));
459 default:
460 return 0;
461 }
462 }
463
464 /* Perform all the pending incrementations. */
465
466 void
467 emit_queue ()
468 {
469 register rtx p;
470 while ((p = pending_chain))
471 {
472 rtx body = QUEUED_BODY (p);
473
474 if (GET_CODE (body) == SEQUENCE)
475 {
476 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
477 emit_insn (QUEUED_BODY (p));
478 }
479 else
480 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
481 pending_chain = QUEUED_NEXT (p);
482 }
483 }
484 \f
485 /* Copy data from FROM to TO, where the machine modes are not the same.
486 Both modes may be integer, or both may be floating.
487 UNSIGNEDP should be nonzero if FROM is an unsigned type.
488 This causes zero-extension instead of sign-extension. */
489
490 void
491 convert_move (to, from, unsignedp)
492 register rtx to, from;
493 int unsignedp;
494 {
495 enum machine_mode to_mode = GET_MODE (to);
496 enum machine_mode from_mode = GET_MODE (from);
497 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
498 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
499 enum insn_code code;
500 rtx libcall;
501
502 /* rtx code for making an equivalent value. */
503 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
504
505 to = protect_from_queue (to, 1);
506 from = protect_from_queue (from, 0);
507
508 if (to_real != from_real)
509 abort ();
510
511 /* If FROM is a SUBREG that indicates that we have already done at least
512 the required extension, strip it. We don't handle such SUBREGs as
513 TO here. */
514
515 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
516 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
517 >= GET_MODE_SIZE (to_mode))
518 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
519 from = gen_lowpart (to_mode, from), from_mode = to_mode;
520
521 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
522 abort ();
523
524 if (to_mode == from_mode
525 || (from_mode == VOIDmode && CONSTANT_P (from)))
526 {
527 emit_move_insn (to, from);
528 return;
529 }
530
531 if (to_real)
532 {
533 rtx value;
534
535 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
536 {
537 /* Try converting directly if the insn is supported. */
538 if ((code = can_extend_p (to_mode, from_mode, 0))
539 != CODE_FOR_nothing)
540 {
541 emit_unop_insn (code, to, from, UNKNOWN);
542 return;
543 }
544 }
545
546 #ifdef HAVE_trunchfqf2
547 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
548 {
549 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
550 return;
551 }
552 #endif
553 #ifdef HAVE_trunctqfqf2
554 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
555 {
556 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
557 return;
558 }
559 #endif
560 #ifdef HAVE_truncsfqf2
561 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
562 {
563 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
564 return;
565 }
566 #endif
567 #ifdef HAVE_truncdfqf2
568 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
569 {
570 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
571 return;
572 }
573 #endif
574 #ifdef HAVE_truncxfqf2
575 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
576 {
577 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
578 return;
579 }
580 #endif
581 #ifdef HAVE_trunctfqf2
582 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
583 {
584 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
585 return;
586 }
587 #endif
588
589 #ifdef HAVE_trunctqfhf2
590 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
591 {
592 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
593 return;
594 }
595 #endif
596 #ifdef HAVE_truncsfhf2
597 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
598 {
599 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
600 return;
601 }
602 #endif
603 #ifdef HAVE_truncdfhf2
604 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
605 {
606 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
607 return;
608 }
609 #endif
610 #ifdef HAVE_truncxfhf2
611 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
612 {
613 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
614 return;
615 }
616 #endif
617 #ifdef HAVE_trunctfhf2
618 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
619 {
620 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
621 return;
622 }
623 #endif
624
625 #ifdef HAVE_truncsftqf2
626 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
627 {
628 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
629 return;
630 }
631 #endif
632 #ifdef HAVE_truncdftqf2
633 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
634 {
635 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
636 return;
637 }
638 #endif
639 #ifdef HAVE_truncxftqf2
640 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
641 {
642 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
643 return;
644 }
645 #endif
646 #ifdef HAVE_trunctftqf2
647 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
648 {
649 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
650 return;
651 }
652 #endif
653
654 #ifdef HAVE_truncdfsf2
655 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
656 {
657 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
658 return;
659 }
660 #endif
661 #ifdef HAVE_truncxfsf2
662 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
663 {
664 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
665 return;
666 }
667 #endif
668 #ifdef HAVE_trunctfsf2
669 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
670 {
671 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
672 return;
673 }
674 #endif
675 #ifdef HAVE_truncxfdf2
676 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
677 {
678 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
679 return;
680 }
681 #endif
682 #ifdef HAVE_trunctfdf2
683 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
684 {
685 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
686 return;
687 }
688 #endif
689
690 libcall = (rtx) 0;
691 switch (from_mode)
692 {
693 case SFmode:
694 switch (to_mode)
695 {
696 case DFmode:
697 libcall = extendsfdf2_libfunc;
698 break;
699
700 case XFmode:
701 libcall = extendsfxf2_libfunc;
702 break;
703
704 case TFmode:
705 libcall = extendsftf2_libfunc;
706 break;
707
708 default:
709 break;
710 }
711 break;
712
713 case DFmode:
714 switch (to_mode)
715 {
716 case SFmode:
717 libcall = truncdfsf2_libfunc;
718 break;
719
720 case XFmode:
721 libcall = extenddfxf2_libfunc;
722 break;
723
724 case TFmode:
725 libcall = extenddftf2_libfunc;
726 break;
727
728 default:
729 break;
730 }
731 break;
732
733 case XFmode:
734 switch (to_mode)
735 {
736 case SFmode:
737 libcall = truncxfsf2_libfunc;
738 break;
739
740 case DFmode:
741 libcall = truncxfdf2_libfunc;
742 break;
743
744 default:
745 break;
746 }
747 break;
748
749 case TFmode:
750 switch (to_mode)
751 {
752 case SFmode:
753 libcall = trunctfsf2_libfunc;
754 break;
755
756 case DFmode:
757 libcall = trunctfdf2_libfunc;
758 break;
759
760 default:
761 break;
762 }
763 break;
764
765 default:
766 break;
767 }
768
769 if (libcall == (rtx) 0)
770 /* This conversion is not implemented yet. */
771 abort ();
772
773 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
774 1, from, from_mode);
775 emit_move_insn (to, value);
776 return;
777 }
778
779 /* Now both modes are integers. */
780
781 /* Handle expanding beyond a word. */
782 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
783 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
784 {
785 rtx insns;
786 rtx lowpart;
787 rtx fill_value;
788 rtx lowfrom;
789 int i;
790 enum machine_mode lowpart_mode;
791 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
792
793 /* Try converting directly if the insn is supported. */
794 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
795 != CODE_FOR_nothing)
796 {
797 /* If FROM is a SUBREG, put it into a register. Do this
798 so that we always generate the same set of insns for
799 better cse'ing; if an intermediate assignment occurred,
800 we won't be doing the operation directly on the SUBREG. */
801 if (optimize > 0 && GET_CODE (from) == SUBREG)
802 from = force_reg (from_mode, from);
803 emit_unop_insn (code, to, from, equiv_code);
804 return;
805 }
806 /* Next, try converting via full word. */
807 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
808 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
809 != CODE_FOR_nothing))
810 {
811 if (GET_CODE (to) == REG)
812 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
813 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
814 emit_unop_insn (code, to,
815 gen_lowpart (word_mode, to), equiv_code);
816 return;
817 }
818
819 /* No special multiword conversion insn; do it by hand. */
820 start_sequence ();
821
822 /* Since we will turn this into a no conflict block, we must ensure
823 that the source does not overlap the target. */
824
825 if (reg_overlap_mentioned_p (to, from))
826 from = force_reg (from_mode, from);
827
828 /* Get a copy of FROM widened to a word, if necessary. */
829 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
830 lowpart_mode = word_mode;
831 else
832 lowpart_mode = from_mode;
833
834 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
835
836 lowpart = gen_lowpart (lowpart_mode, to);
837 emit_move_insn (lowpart, lowfrom);
838
839 /* Compute the value to put in each remaining word. */
840 if (unsignedp)
841 fill_value = const0_rtx;
842 else
843 {
844 #ifdef HAVE_slt
845 if (HAVE_slt
846 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
847 && STORE_FLAG_VALUE == -1)
848 {
849 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
850 lowpart_mode, 0, 0);
851 fill_value = gen_reg_rtx (word_mode);
852 emit_insn (gen_slt (fill_value));
853 }
854 else
855 #endif
856 {
857 fill_value
858 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
859 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
860 NULL_RTX, 0);
861 fill_value = convert_to_mode (word_mode, fill_value, 1);
862 }
863 }
864
865 /* Fill the remaining words. */
866 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
867 {
868 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
869 rtx subword = operand_subword (to, index, 1, to_mode);
870
871 if (subword == 0)
872 abort ();
873
874 if (fill_value != subword)
875 emit_move_insn (subword, fill_value);
876 }
877
878 insns = get_insns ();
879 end_sequence ();
880
881 emit_no_conflict_block (insns, to, from, NULL_RTX,
882 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
883 return;
884 }
885
886 /* Truncating multi-word to a word or less. */
887 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
888 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
889 {
890 if (!((GET_CODE (from) == MEM
891 && ! MEM_VOLATILE_P (from)
892 && direct_load[(int) to_mode]
893 && ! mode_dependent_address_p (XEXP (from, 0)))
894 || GET_CODE (from) == REG
895 || GET_CODE (from) == SUBREG))
896 from = force_reg (from_mode, from);
897 convert_move (to, gen_lowpart (word_mode, from), 0);
898 return;
899 }
900
901 /* Handle pointer conversion */ /* SPEE 900220 */
902 if (to_mode == PQImode)
903 {
904 if (from_mode != QImode)
905 from = convert_to_mode (QImode, from, unsignedp);
906
907 #ifdef HAVE_truncqipqi2
908 if (HAVE_truncqipqi2)
909 {
910 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
911 return;
912 }
913 #endif /* HAVE_truncqipqi2 */
914 abort ();
915 }
916
917 if (from_mode == PQImode)
918 {
919 if (to_mode != QImode)
920 {
921 from = convert_to_mode (QImode, from, unsignedp);
922 from_mode = QImode;
923 }
924 else
925 {
926 #ifdef HAVE_extendpqiqi2
927 if (HAVE_extendpqiqi2)
928 {
929 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
930 return;
931 }
932 #endif /* HAVE_extendpqiqi2 */
933 abort ();
934 }
935 }
936
937 if (to_mode == PSImode)
938 {
939 if (from_mode != SImode)
940 from = convert_to_mode (SImode, from, unsignedp);
941
942 #ifdef HAVE_truncsipsi2
943 if (HAVE_truncsipsi2)
944 {
945 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
946 return;
947 }
948 #endif /* HAVE_truncsipsi2 */
949 abort ();
950 }
951
952 if (from_mode == PSImode)
953 {
954 if (to_mode != SImode)
955 {
956 from = convert_to_mode (SImode, from, unsignedp);
957 from_mode = SImode;
958 }
959 else
960 {
961 #ifdef HAVE_extendpsisi2
962 if (HAVE_extendpsisi2)
963 {
964 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
965 return;
966 }
967 #endif /* HAVE_extendpsisi2 */
968 abort ();
969 }
970 }
971
972 if (to_mode == PDImode)
973 {
974 if (from_mode != DImode)
975 from = convert_to_mode (DImode, from, unsignedp);
976
977 #ifdef HAVE_truncdipdi2
978 if (HAVE_truncdipdi2)
979 {
980 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
981 return;
982 }
983 #endif /* HAVE_truncdipdi2 */
984 abort ();
985 }
986
987 if (from_mode == PDImode)
988 {
989 if (to_mode != DImode)
990 {
991 from = convert_to_mode (DImode, from, unsignedp);
992 from_mode = DImode;
993 }
994 else
995 {
996 #ifdef HAVE_extendpdidi2
997 if (HAVE_extendpdidi2)
998 {
999 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1000 return;
1001 }
1002 #endif /* HAVE_extendpdidi2 */
1003 abort ();
1004 }
1005 }
1006
1007 /* Now follow all the conversions between integers
1008 no more than a word long. */
1009
1010 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1011 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1012 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1013 GET_MODE_BITSIZE (from_mode)))
1014 {
1015 if (!((GET_CODE (from) == MEM
1016 && ! MEM_VOLATILE_P (from)
1017 && direct_load[(int) to_mode]
1018 && ! mode_dependent_address_p (XEXP (from, 0)))
1019 || GET_CODE (from) == REG
1020 || GET_CODE (from) == SUBREG))
1021 from = force_reg (from_mode, from);
1022 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1023 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1024 from = copy_to_reg (from);
1025 emit_move_insn (to, gen_lowpart (to_mode, from));
1026 return;
1027 }
1028
1029 /* Handle extension. */
1030 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1031 {
1032 /* Convert directly if that works. */
1033 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1034 != CODE_FOR_nothing)
1035 {
1036 emit_unop_insn (code, to, from, equiv_code);
1037 return;
1038 }
1039 else
1040 {
1041 enum machine_mode intermediate;
1042 rtx tmp;
1043 tree shift_amount;
1044
1045 /* Search for a mode to convert via. */
1046 for (intermediate = from_mode; intermediate != VOIDmode;
1047 intermediate = GET_MODE_WIDER_MODE (intermediate))
1048 if (((can_extend_p (to_mode, intermediate, unsignedp)
1049 != CODE_FOR_nothing)
1050 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1051 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1052 GET_MODE_BITSIZE (intermediate))))
1053 && (can_extend_p (intermediate, from_mode, unsignedp)
1054 != CODE_FOR_nothing))
1055 {
1056 convert_move (to, convert_to_mode (intermediate, from,
1057 unsignedp), unsignedp);
1058 return;
1059 }
1060
1061 /* No suitable intermediate mode.
1062 Generate what we need with shifts. */
1063 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1064 - GET_MODE_BITSIZE (from_mode), 0);
1065 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1066 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1067 to, unsignedp);
1068 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1069 to, unsignedp);
1070 if (tmp != to)
1071 emit_move_insn (to, tmp);
1072 return;
1073 }
1074 }
1075
1076 /* Support special truncate insns for certain modes. */
1077
1078 if (from_mode == DImode && to_mode == SImode)
1079 {
1080 #ifdef HAVE_truncdisi2
1081 if (HAVE_truncdisi2)
1082 {
1083 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1084 return;
1085 }
1086 #endif
1087 convert_move (to, force_reg (from_mode, from), unsignedp);
1088 return;
1089 }
1090
1091 if (from_mode == DImode && to_mode == HImode)
1092 {
1093 #ifdef HAVE_truncdihi2
1094 if (HAVE_truncdihi2)
1095 {
1096 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1097 return;
1098 }
1099 #endif
1100 convert_move (to, force_reg (from_mode, from), unsignedp);
1101 return;
1102 }
1103
1104 if (from_mode == DImode && to_mode == QImode)
1105 {
1106 #ifdef HAVE_truncdiqi2
1107 if (HAVE_truncdiqi2)
1108 {
1109 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1110 return;
1111 }
1112 #endif
1113 convert_move (to, force_reg (from_mode, from), unsignedp);
1114 return;
1115 }
1116
1117 if (from_mode == SImode && to_mode == HImode)
1118 {
1119 #ifdef HAVE_truncsihi2
1120 if (HAVE_truncsihi2)
1121 {
1122 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1123 return;
1124 }
1125 #endif
1126 convert_move (to, force_reg (from_mode, from), unsignedp);
1127 return;
1128 }
1129
1130 if (from_mode == SImode && to_mode == QImode)
1131 {
1132 #ifdef HAVE_truncsiqi2
1133 if (HAVE_truncsiqi2)
1134 {
1135 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1136 return;
1137 }
1138 #endif
1139 convert_move (to, force_reg (from_mode, from), unsignedp);
1140 return;
1141 }
1142
1143 if (from_mode == HImode && to_mode == QImode)
1144 {
1145 #ifdef HAVE_trunchiqi2
1146 if (HAVE_trunchiqi2)
1147 {
1148 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1149 return;
1150 }
1151 #endif
1152 convert_move (to, force_reg (from_mode, from), unsignedp);
1153 return;
1154 }
1155
1156 if (from_mode == TImode && to_mode == DImode)
1157 {
1158 #ifdef HAVE_trunctidi2
1159 if (HAVE_trunctidi2)
1160 {
1161 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1162 return;
1163 }
1164 #endif
1165 convert_move (to, force_reg (from_mode, from), unsignedp);
1166 return;
1167 }
1168
1169 if (from_mode == TImode && to_mode == SImode)
1170 {
1171 #ifdef HAVE_trunctisi2
1172 if (HAVE_trunctisi2)
1173 {
1174 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1175 return;
1176 }
1177 #endif
1178 convert_move (to, force_reg (from_mode, from), unsignedp);
1179 return;
1180 }
1181
1182 if (from_mode == TImode && to_mode == HImode)
1183 {
1184 #ifdef HAVE_trunctihi2
1185 if (HAVE_trunctihi2)
1186 {
1187 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1188 return;
1189 }
1190 #endif
1191 convert_move (to, force_reg (from_mode, from), unsignedp);
1192 return;
1193 }
1194
1195 if (from_mode == TImode && to_mode == QImode)
1196 {
1197 #ifdef HAVE_trunctiqi2
1198 if (HAVE_trunctiqi2)
1199 {
1200 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1201 return;
1202 }
1203 #endif
1204 convert_move (to, force_reg (from_mode, from), unsignedp);
1205 return;
1206 }
1207
1208 /* Handle truncation of volatile memrefs, and so on;
1209 the things that couldn't be truncated directly,
1210 and for which there was no special instruction. */
1211 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1212 {
1213 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1214 emit_move_insn (to, temp);
1215 return;
1216 }
1217
1218 /* Mode combination is not recognized. */
1219 abort ();
1220 }
1221
1222 /* Return an rtx for a value that would result
1223 from converting X to mode MODE.
1224 Both X and MODE may be floating, or both integer.
1225 UNSIGNEDP is nonzero if X is an unsigned value.
1226 This can be done by referring to a part of X in place
1227 or by copying to a new temporary with conversion.
1228
1229 This function *must not* call protect_from_queue
1230 except when putting X into an insn (in which case convert_move does it). */
1231
1232 rtx
1233 convert_to_mode (mode, x, unsignedp)
1234 enum machine_mode mode;
1235 rtx x;
1236 int unsignedp;
1237 {
1238 return convert_modes (mode, VOIDmode, x, unsignedp);
1239 }
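/* For example, convert_to_mode (DImode, x, 1) widens a narrower X to DImode
   with zero extension, while UNSIGNEDP == 0 requests sign extension; both
   simply defer to convert_modes with OLDMODE == VOIDmode, so the old mode is
   taken from X itself.  */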
1240
1241 /* Return an rtx for a value that would result
1242 from converting X from mode OLDMODE to mode MODE.
1243 Both modes may be floating, or both integer.
1244 UNSIGNEDP is nonzero if X is an unsigned value.
1245
1246 This can be done by referring to a part of X in place
1247 or by copying to a new temporary with conversion.
1248
1249 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1250
1251 This function *must not* call protect_from_queue
1252 except when putting X into an insn (in which case convert_move does it). */
1253
1254 rtx
1255 convert_modes (mode, oldmode, x, unsignedp)
1256 enum machine_mode mode, oldmode;
1257 rtx x;
1258 int unsignedp;
1259 {
1260 register rtx temp;
1261
1262 /* If FROM is a SUBREG that indicates that we have already done at least
1263 the required extension, strip it. */
1264
1265 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1266 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1267 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1268 x = gen_lowpart (mode, x);
1269
1270 if (GET_MODE (x) != VOIDmode)
1271 oldmode = GET_MODE (x);
1272
1273 if (mode == oldmode)
1274 return x;
1275
1276 /* There is one case that we must handle specially: If we are converting
1277 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1278 we are to interpret the constant as unsigned, gen_lowpart will do
1279 the wrong thing if the constant appears negative. What we want to do is
1280 make the high-order word of the constant zero, not all ones. */
1281
1282 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1283 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1284 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1285 {
1286 HOST_WIDE_INT val = INTVAL (x);
1287
1288 if (oldmode != VOIDmode
1289 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1290 {
1291 int width = GET_MODE_BITSIZE (oldmode);
1292
1293 /* We need to zero extend VAL. */
1294 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1295 }
1296
1297 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1298 }
1299
1300 /* We can do this with a gen_lowpart if both desired and current modes
1301 are integer, and this is either a constant integer, a register, or a
1302 non-volatile MEM. Except for the constant case where MODE is no
1303 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1304
1305 if ((GET_CODE (x) == CONST_INT
1306 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1307 || (GET_MODE_CLASS (mode) == MODE_INT
1308 && GET_MODE_CLASS (oldmode) == MODE_INT
1309 && (GET_CODE (x) == CONST_DOUBLE
1310 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1311 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1312 && direct_load[(int) mode])
1313 || (GET_CODE (x) == REG
1314 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1315 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1316 {
1317 /* ?? If we don't know OLDMODE, we have to assume here that
1318 X does not need sign- or zero-extension. This may not be
1319 the case, but it's the best we can do. */
1320 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1321 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1322 {
1323 HOST_WIDE_INT val = INTVAL (x);
1324 int width = GET_MODE_BITSIZE (oldmode);
1325
1326 /* We must sign or zero-extend in this case. Start by
1327 zero-extending, then sign extend if we need to. */
1328 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1329 if (! unsignedp
1330 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1331 val |= (HOST_WIDE_INT) (-1) << width;
1332
1333 return GEN_INT (val);
1334 }
1335
1336 return gen_lowpart (mode, x);
1337 }
1338
1339 temp = gen_reg_rtx (mode);
1340 convert_move (temp, x, unsignedp);
1341 return temp;
1342 }
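/* A worked constant case: convert_modes (HImode, QImode, GEN_INT (-1), 1)
   takes the CONST_INT branch above, masks the value down to the 8 bits of
   QImode, and returns GEN_INT (0xff) rather than a sign-extended -1.  */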
1343 \f
1344
1345 /* This macro is used to determine what the largest unit size that
1346 move_by_pieces can use is. */
1347
1348 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1349 move efficiently, as opposed to MOVE_MAX which is the maximum
1350 number of bytes we can move with a single instruction. */
1351
1352 #ifndef MOVE_MAX_PIECES
1353 #define MOVE_MAX_PIECES MOVE_MAX
1354 #endif
1355
1356 /* Generate several move instructions to copy LEN bytes
1357 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1358 The caller must pass FROM and TO
1359 through protect_from_queue before calling.
1360 ALIGN (in bytes) is maximum alignment we can assume. */
1361
1362 void
1363 move_by_pieces (to, from, len, align)
1364 rtx to, from;
1365 int len, align;
1366 {
1367 struct move_by_pieces data;
1368 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1369 int max_size = MOVE_MAX_PIECES + 1;
1370 enum machine_mode mode = VOIDmode, tmode;
1371 enum insn_code icode;
1372
1373 data.offset = 0;
1374 data.to_addr = to_addr;
1375 data.from_addr = from_addr;
1376 data.to = to;
1377 data.from = from;
1378 data.autinc_to
1379 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1380 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1381 data.autinc_from
1382 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1383 || GET_CODE (from_addr) == POST_INC
1384 || GET_CODE (from_addr) == POST_DEC);
1385
1386 data.explicit_inc_from = 0;
1387 data.explicit_inc_to = 0;
1388 data.reverse
1389 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1390 if (data.reverse) data.offset = len;
1391 data.len = len;
1392
1393 data.to_struct = MEM_IN_STRUCT_P (to);
1394 data.from_struct = MEM_IN_STRUCT_P (from);
1395 data.to_readonly = RTX_UNCHANGING_P (to);
1396 data.from_readonly = RTX_UNCHANGING_P (from);
1397
1398 /* If copying requires more than two move insns,
1399 copy addresses to registers (to make displacements shorter)
1400 and use post-increment if available. */
1401 if (!(data.autinc_from && data.autinc_to)
1402 && move_by_pieces_ninsns (len, align) > 2)
1403 {
1404 /* Find the mode of the largest move... */
1405 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1406 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1407 if (GET_MODE_SIZE (tmode) < max_size)
1408 mode = tmode;
1409
1410 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1411 {
1412 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1413 data.autinc_from = 1;
1414 data.explicit_inc_from = -1;
1415 }
1416 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1417 {
1418 data.from_addr = copy_addr_to_reg (from_addr);
1419 data.autinc_from = 1;
1420 data.explicit_inc_from = 1;
1421 }
1422 if (!data.autinc_from && CONSTANT_P (from_addr))
1423 data.from_addr = copy_addr_to_reg (from_addr);
1424 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1425 {
1426 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1427 data.autinc_to = 1;
1428 data.explicit_inc_to = -1;
1429 }
1430 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1431 {
1432 data.to_addr = copy_addr_to_reg (to_addr);
1433 data.autinc_to = 1;
1434 data.explicit_inc_to = 1;
1435 }
1436 if (!data.autinc_to && CONSTANT_P (to_addr))
1437 data.to_addr = copy_addr_to_reg (to_addr);
1438 }
1439
1440 if (! SLOW_UNALIGNED_ACCESS
1441 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1442 align = MOVE_MAX;
1443
1444 /* First move what we can in the largest integer mode, then go to
1445 successively smaller modes. */
1446
1447 while (max_size > 1)
1448 {
1449 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1450 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1451 if (GET_MODE_SIZE (tmode) < max_size)
1452 mode = tmode;
1453
1454 if (mode == VOIDmode)
1455 break;
1456
1457 icode = mov_optab->handlers[(int) mode].insn_code;
1458 if (icode != CODE_FOR_nothing
1459 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1460 GET_MODE_SIZE (mode)))
1461 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1462
1463 max_size = GET_MODE_SIZE (mode);
1464 }
1465
1466 /* The code above should have handled everything. */
1467 if (data.len > 0)
1468 abort ();
1469 }
1470
1471 /* Return number of insns required to move L bytes by pieces.
1472 ALIGN (in bytes) is maximum alignment we can assume. */
1473
1474 static int
1475 move_by_pieces_ninsns (l, align)
1476 unsigned int l;
1477 int align;
1478 {
1479 register int n_insns = 0;
1480 int max_size = MOVE_MAX + 1;
1481
1482 if (! SLOW_UNALIGNED_ACCESS
1483 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1484 align = MOVE_MAX;
1485
1486 while (max_size > 1)
1487 {
1488 enum machine_mode mode = VOIDmode, tmode;
1489 enum insn_code icode;
1490
1491 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1492 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1493 if (GET_MODE_SIZE (tmode) < max_size)
1494 mode = tmode;
1495
1496 if (mode == VOIDmode)
1497 break;
1498
1499 icode = mov_optab->handlers[(int) mode].insn_code;
1500 if (icode != CODE_FOR_nothing
1501 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1502 GET_MODE_SIZE (mode)))
1503 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1504
1505 max_size = GET_MODE_SIZE (mode);
1506 }
1507
1508 return n_insns;
1509 }
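/* For instance, assuming MOVE_MAX is 4 and word-aligned operands, a ten-byte
   copy is counted as two SImode moves plus one HImode move, so
   move_by_pieces_ninsns (10, 4) returns 3.  */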
1510
1511 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1512 with move instructions for mode MODE. GENFUN is the gen_... function
1513 to make a move insn for that mode. DATA has all the other info. */
1514
1515 static void
1516 move_by_pieces_1 (genfun, mode, data)
1517 rtx (*genfun) PROTO ((rtx, ...));
1518 enum machine_mode mode;
1519 struct move_by_pieces *data;
1520 {
1521 register int size = GET_MODE_SIZE (mode);
1522 register rtx to1, from1;
1523
1524 while (data->len >= size)
1525 {
1526 if (data->reverse) data->offset -= size;
1527
1528 to1 = (data->autinc_to
1529 ? gen_rtx_MEM (mode, data->to_addr)
1530 : copy_rtx (change_address (data->to, mode,
1531 plus_constant (data->to_addr,
1532 data->offset))));
1533 MEM_IN_STRUCT_P (to1) = data->to_struct;
1534 RTX_UNCHANGING_P (to1) = data->to_readonly;
1535
1536 from1
1537 = (data->autinc_from
1538 ? gen_rtx_MEM (mode, data->from_addr)
1539 : copy_rtx (change_address (data->from, mode,
1540 plus_constant (data->from_addr,
1541 data->offset))));
1542 MEM_IN_STRUCT_P (from1) = data->from_struct;
1543 RTX_UNCHANGING_P (from1) = data->from_readonly;
1544
1545 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1546 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1547 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1548 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1549
1550 emit_insn ((*genfun) (to1, from1));
1551 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1552 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1553 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1554 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1555
1556 if (! data->reverse) data->offset += size;
1557
1558 data->len -= size;
1559 }
1560 }
1561 \f
1562 /* Emit code to move a block Y to a block X.
1563 This may be done with string-move instructions,
1564 with multiple scalar move instructions, or with a library call.
1565
1566 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1567 with mode BLKmode.
1568 SIZE is an rtx that says how long they are.
1569 ALIGN is the maximum alignment we can assume they have,
1570 measured in bytes.
1571
1572 Return the address of the new block, if memcpy is called and returns it,
1573 0 otherwise. */
1574
1575 rtx
1576 emit_block_move (x, y, size, align)
1577 rtx x, y;
1578 rtx size;
1579 int align;
1580 {
1581 rtx retval = 0;
1582 #ifdef TARGET_MEM_FUNCTIONS
1583 static tree fn;
1584 tree call_expr, arg_list;
1585 #endif
1586
1587 if (GET_MODE (x) != BLKmode)
1588 abort ();
1589
1590 if (GET_MODE (y) != BLKmode)
1591 abort ();
1592
1593 x = protect_from_queue (x, 1);
1594 y = protect_from_queue (y, 0);
1595 size = protect_from_queue (size, 0);
1596
1597 if (GET_CODE (x) != MEM)
1598 abort ();
1599 if (GET_CODE (y) != MEM)
1600 abort ();
1601 if (size == 0)
1602 abort ();
1603
1604 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1605 move_by_pieces (x, y, INTVAL (size), align);
1606 else
1607 {
1608 /* Try the most limited insn first, because there's no point
1609 including more than one in the machine description unless
1610 the more limited one has some advantage. */
1611
1612 rtx opalign = GEN_INT (align);
1613 enum machine_mode mode;
1614
1615 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1616 mode = GET_MODE_WIDER_MODE (mode))
1617 {
1618 enum insn_code code = movstr_optab[(int) mode];
1619 insn_operand_predicate_fn pred;
1620
1621 if (code != CODE_FOR_nothing
1622 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1623 here because if SIZE is less than the mode mask, as it is
1624 returned by the macro, it will definitely be less than the
1625 actual mode mask. */
1626 && ((GET_CODE (size) == CONST_INT
1627 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1628 <= (GET_MODE_MASK (mode) >> 1)))
1629 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1630 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1631 || (*pred) (x, BLKmode))
1632 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1633 || (*pred) (y, BLKmode))
1634 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1635 || (*pred) (opalign, VOIDmode)))
1636 {
1637 rtx op2;
1638 rtx last = get_last_insn ();
1639 rtx pat;
1640
1641 op2 = convert_to_mode (mode, size, 1);
1642 pred = insn_data[(int) code].operand[2].predicate;
1643 if (pred != 0 && ! (*pred) (op2, mode))
1644 op2 = copy_to_mode_reg (mode, op2);
1645
1646 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1647 if (pat)
1648 {
1649 emit_insn (pat);
1650 return 0;
1651 }
1652 else
1653 delete_insns_since (last);
1654 }
1655 }
1656
1657 /* X, Y, or SIZE may have been passed through protect_from_queue.
1658
1659 It is unsafe to save the value generated by protect_from_queue
1660 and reuse it later. Consider what happens if emit_queue is
1661 called before the return value from protect_from_queue is used.
1662
1663 Expansion of the CALL_EXPR below will call emit_queue before
1664 we are finished emitting RTL for argument setup. So if we are
1665 not careful we could get the wrong value for an argument.
1666
1667 To avoid this problem we go ahead and emit code to copy X, Y &
1668 SIZE into new pseudos. We can then place those new pseudos
1669 into an RTL_EXPR and use them later, even after a call to
1670 emit_queue.
1671
1672 Note this is not strictly needed for library calls since they
1673 do not call emit_queue before loading their arguments. However,
1674 we may need to have library calls call emit_queue in the future
1675 since failing to do so could cause problems for targets which
1676 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1677 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1678 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1679
1680 #ifdef TARGET_MEM_FUNCTIONS
1681 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1682 #else
1683 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1684 TREE_UNSIGNED (integer_type_node));
1685 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1686 #endif
1687
1688 #ifdef TARGET_MEM_FUNCTIONS
1689 /* It is incorrect to use the libcall calling conventions to call
1690 memcpy in this context.
1691
1692 This could be a user call to memcpy and the user may wish to
1693 examine the return value from memcpy.
1694
1695 For targets where libcalls and normal calls have different conventions
1696 for returning pointers, we could end up generating incorrect code.
1697
1698 So instead of using a libcall sequence we build up a suitable
1699 CALL_EXPR and expand the call in the normal fashion. */
1700 if (fn == NULL_TREE)
1701 {
1702 tree fntype;
1703
1704 /* This was copied from except.c; I don't know if all this is
1705 necessary in this context or not. */
1706 fn = get_identifier ("memcpy");
1707 push_obstacks_nochange ();
1708 end_temporary_allocation ();
1709 fntype = build_pointer_type (void_type_node);
1710 fntype = build_function_type (fntype, NULL_TREE);
1711 fn = build_decl (FUNCTION_DECL, fn, fntype);
1712 ggc_add_tree_root (&fn, 1);
1713 DECL_EXTERNAL (fn) = 1;
1714 TREE_PUBLIC (fn) = 1;
1715 DECL_ARTIFICIAL (fn) = 1;
1716 make_decl_rtl (fn, NULL_PTR, 1);
1717 assemble_external (fn);
1718 pop_obstacks ();
1719 }
1720
1721 /* We need to make an argument list for the function call.
1722
1723 memcpy has three arguments, the first two are void * addresses and
1724 the last is a size_t byte count for the copy. */
1725 arg_list
1726 = build_tree_list (NULL_TREE,
1727 make_tree (build_pointer_type (void_type_node), x));
1728 TREE_CHAIN (arg_list)
1729 = build_tree_list (NULL_TREE,
1730 make_tree (build_pointer_type (void_type_node), y));
1731 TREE_CHAIN (TREE_CHAIN (arg_list))
1732 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1733 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1734
1735 /* Now we have to build up the CALL_EXPR itself. */
1736 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1737 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1738 call_expr, arg_list, NULL_TREE);
1739 TREE_SIDE_EFFECTS (call_expr) = 1;
1740
1741 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1742 #else
1743 emit_library_call (bcopy_libfunc, 0,
1744 VOIDmode, 3, y, Pmode, x, Pmode,
1745 convert_to_mode (TYPE_MODE (integer_type_node), size,
1746 TREE_UNSIGNED (integer_type_node)),
1747 TYPE_MODE (integer_type_node));
1748 #endif
1749 }
1750
1751 return retval;
1752 }
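/* In short, emit_block_move (x, y, GEN_INT (n), align) copies the N-byte
   block Y into X inline via move_by_pieces when N is a small constant and
   the blocks are sufficiently aligned, otherwise through a movstrM pattern
   if the target provides one, and as a last resort by calling memcpy (or
   bcopy when TARGET_MEM_FUNCTIONS is not defined).  */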
1753 \f
1754 /* Copy all or part of a value X into registers starting at REGNO.
1755 The number of registers to be filled is NREGS. */
1756
1757 void
1758 move_block_to_reg (regno, x, nregs, mode)
1759 int regno;
1760 rtx x;
1761 int nregs;
1762 enum machine_mode mode;
1763 {
1764 int i;
1765 #ifdef HAVE_load_multiple
1766 rtx pat;
1767 rtx last;
1768 #endif
1769
1770 if (nregs == 0)
1771 return;
1772
1773 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1774 x = validize_mem (force_const_mem (mode, x));
1775
1776 /* See if the machine can do this with a load multiple insn. */
1777 #ifdef HAVE_load_multiple
1778 if (HAVE_load_multiple)
1779 {
1780 last = get_last_insn ();
1781 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1782 GEN_INT (nregs));
1783 if (pat)
1784 {
1785 emit_insn (pat);
1786 return;
1787 }
1788 else
1789 delete_insns_since (last);
1790 }
1791 #endif
1792
1793 for (i = 0; i < nregs; i++)
1794 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1795 operand_subword_force (x, i, mode));
1796 }
1797
1798 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1799 The number of registers to be filled is NREGS. SIZE indicates the number
1800 of bytes in the object X. */
1801
1802
1803 void
1804 move_block_from_reg (regno, x, nregs, size)
1805 int regno;
1806 rtx x;
1807 int nregs;
1808 int size;
1809 {
1810 int i;
1811 #ifdef HAVE_store_multiple
1812 rtx pat;
1813 rtx last;
1814 #endif
1815 enum machine_mode mode;
1816
1817 /* If SIZE is that of a mode no bigger than a word, just use that
1818 mode's store operation. */
1819 if (size <= UNITS_PER_WORD
1820 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1821 {
1822 emit_move_insn (change_address (x, mode, NULL),
1823 gen_rtx_REG (mode, regno));
1824 return;
1825 }
1826
1827 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1828 to the left before storing to memory. Note that the previous test
1829 doesn't handle all cases (e.g. SIZE == 3). */
1830 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1831 {
1832 rtx tem = operand_subword (x, 0, 1, BLKmode);
1833 rtx shift;
1834
1835 if (tem == 0)
1836 abort ();
1837
1838 shift = expand_shift (LSHIFT_EXPR, word_mode,
1839 gen_rtx_REG (word_mode, regno),
1840 build_int_2 ((UNITS_PER_WORD - size)
1841 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1842 emit_move_insn (tem, shift);
1843 return;
1844 }
1845
1846 /* See if the machine can do this with a store multiple insn. */
1847 #ifdef HAVE_store_multiple
1848 if (HAVE_store_multiple)
1849 {
1850 last = get_last_insn ();
1851 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1852 GEN_INT (nregs));
1853 if (pat)
1854 {
1855 emit_insn (pat);
1856 return;
1857 }
1858 else
1859 delete_insns_since (last);
1860 }
1861 #endif
1862
1863 for (i = 0; i < nregs; i++)
1864 {
1865 rtx tem = operand_subword (x, i, 1, BLKmode);
1866
1867 if (tem == 0)
1868 abort ();
1869
1870 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1871 }
1872 }
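/* An example of the big-endian adjustment above: with UNITS_PER_WORD == 4,
   a three-byte BLKmode value arriving in a register on a BYTES_BIG_ENDIAN
   target is shifted left by 8 bits before being stored, so its three
   meaningful bytes land at the low-order memory addresses of X.  */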
1873
1874 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1875 registers represented by a PARALLEL. SSIZE represents the total size of
1876 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1877 SRC in bits. */
1878 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1879 the balance will be in what would be the low-order memory addresses, i.e.
1880 left justified for big endian, right justified for little endian. This
1881 happens to be true for the targets currently using this support. If this
1882 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1883 would be needed. */
1884
1885 void
1886 emit_group_load (dst, orig_src, ssize, align)
1887 rtx dst, orig_src;
1888 int align, ssize;
1889 {
1890 rtx *tmps, src;
1891 int start, i;
1892
1893 if (GET_CODE (dst) != PARALLEL)
1894 abort ();
1895
1896 /* Check for a NULL entry, used to indicate that the parameter goes
1897 both on the stack and in registers. */
1898 if (XEXP (XVECEXP (dst, 0, 0), 0))
1899 start = 0;
1900 else
1901 start = 1;
1902
1903 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1904
1905 /* If we won't be loading directly from memory, protect the real source
1906 from strange tricks we might play. */
1907 src = orig_src;
1908 if (GET_CODE (src) != MEM)
1909 {
1910 if (GET_MODE (orig_src) == VOIDmode)
1911 src = gen_reg_rtx (GET_MODE (dst));
1912 else
1913 src = gen_reg_rtx (GET_MODE (orig_src));
1914 emit_move_insn (src, orig_src);
1915 }
1916
1917 /* Process the pieces. */
1918 for (i = start; i < XVECLEN (dst, 0); i++)
1919 {
1920 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1921 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1922 int bytelen = GET_MODE_SIZE (mode);
1923 int shift = 0;
1924
1925 /* Handle trailing fragments that run over the size of the struct. */
1926 if (ssize >= 0 && bytepos + bytelen > ssize)
1927 {
1928 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1929 bytelen = ssize - bytepos;
1930 if (bytelen <= 0)
1931 abort();
1932 }
1933
1934 /* Optimize the access just a bit. */
1935 if (GET_CODE (src) == MEM
1936 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1937 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1938 && bytelen == GET_MODE_SIZE (mode))
1939 {
1940 tmps[i] = gen_reg_rtx (mode);
1941 emit_move_insn (tmps[i],
1942 change_address (src, mode,
1943 plus_constant (XEXP (src, 0),
1944 bytepos)));
1945 }
1946 else if (GET_CODE (src) == CONCAT)
1947 {
1948 if (bytepos == 0
1949 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1950 tmps[i] = XEXP (src, 0);
1951 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1952 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1953 tmps[i] = XEXP (src, 1);
1954 else
1955 abort ();
1956 }
1957 else
1958 {
1959 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1960 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1961 mode, mode, align, ssize);
1962 }
1963
1964 if (BYTES_BIG_ENDIAN && shift)
1965 {
1966 	  tmps[i] = expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1967 				  tmps[i], 0, OPTAB_WIDEN);
1968 }
1969 }
1970 emit_queue();
1971
1972 /* Copy the extracted pieces into the proper (probable) hard regs. */
1973 for (i = start; i < XVECLEN (dst, 0); i++)
1974 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1975 }
1976
1977 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1978 registers represented by a PARALLEL. SSIZE represents the total size of
1979    block DST, or -1 if not known.  ALIGN is the known alignment of DST,
        in bytes.  */
1980
1981 void
1982 emit_group_store (orig_dst, src, ssize, align)
1983 rtx orig_dst, src;
1984 int ssize, align;
1985 {
1986 rtx *tmps, dst;
1987 int start, i;
1988
1989 if (GET_CODE (src) != PARALLEL)
1990 abort ();
1991
1992 /* Check for a NULL entry, used to indicate that the parameter goes
1993 both on the stack and in registers. */
1994 if (XEXP (XVECEXP (src, 0, 0), 0))
1995 start = 0;
1996 else
1997 start = 1;
1998
1999 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2000
2001 /* Copy the (probable) hard regs into pseudos. */
2002 for (i = start; i < XVECLEN (src, 0); i++)
2003 {
2004 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2005 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2006 emit_move_insn (tmps[i], reg);
2007 }
2008 emit_queue();
2009
2010 /* If we won't be storing directly into memory, protect the real destination
2011 from strange tricks we might play. */
2012 dst = orig_dst;
2013 if (GET_CODE (dst) == PARALLEL)
2014 {
2015 rtx temp;
2016
2017 /* We can get a PARALLEL dst if there is a conditional expression in
2018 a return statement. In that case, the dst and src are the same,
2019 so no action is necessary. */
2020 if (rtx_equal_p (dst, src))
2021 return;
2022
2023 /* It is unclear if we can ever reach here, but we may as well handle
2024 it. Allocate a temporary, and split this into a store/load to/from
2025 the temporary. */
2026
2027 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2028 emit_group_store (temp, src, ssize, align);
2029 emit_group_load (dst, temp, ssize, align);
2030 return;
2031 }
2032 else if (GET_CODE (dst) != MEM)
2033 {
2034 dst = gen_reg_rtx (GET_MODE (orig_dst));
2035 /* Make life a bit easier for combine. */
2036 emit_move_insn (dst, const0_rtx);
2037 }
2038 else if (! MEM_IN_STRUCT_P (dst))
2039 {
2040 /* store_bit_field requires that memory operations have
2041 mem_in_struct_p set; we might not. */
2042
2043 dst = copy_rtx (orig_dst);
2044 MEM_SET_IN_STRUCT_P (dst, 1);
2045 }
2046
2047 /* Process the pieces. */
2048 for (i = start; i < XVECLEN (src, 0); i++)
2049 {
2050 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2051 enum machine_mode mode = GET_MODE (tmps[i]);
2052 int bytelen = GET_MODE_SIZE (mode);
2053
2054 /* Handle trailing fragments that run over the size of the struct. */
2055 if (ssize >= 0 && bytepos + bytelen > ssize)
2056 {
2057 if (BYTES_BIG_ENDIAN)
2058 {
2059 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2060 	      tmps[i] = expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2061 				      tmps[i], 0, OPTAB_WIDEN);
2062 }
2063 bytelen = ssize - bytepos;
2064 }
2065
2066 /* Optimize the access just a bit. */
2067 if (GET_CODE (dst) == MEM
2068 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2069 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2070 && bytelen == GET_MODE_SIZE (mode))
2071 {
2072 emit_move_insn (change_address (dst, mode,
2073 plus_constant (XEXP (dst, 0),
2074 bytepos)),
2075 tmps[i]);
2076 }
2077 else
2078 {
2079 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2080 mode, tmps[i], align, ssize);
2081 }
2082 }
2083 emit_queue();
2084
2085 /* Copy from the pseudo into the (probable) hard reg. */
2086 if (GET_CODE (dst) == REG)
2087 emit_move_insn (orig_dst, dst);
2088 }
2089
2090 /* Generate code to copy a BLKmode object of TYPE out of a
2091 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2092 is null, a stack temporary is created. TGTBLK is returned.
2093
2094 The primary purpose of this routine is to handle functions
2095 that return BLKmode structures in registers. Some machines
2096 (the PA for example) want to return all small structures
2097 in registers regardless of the structure's alignment.
2098 */
2099
2100 rtx
2101 copy_blkmode_from_reg (tgtblk, srcreg, type)
2102 rtx tgtblk;
2103 rtx srcreg;
2104 tree type;
2105 {
2106 int bytes = int_size_in_bytes (type);
2107 rtx src = NULL, dst = NULL;
2108 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2109 int bitpos, xbitpos, big_endian_correction = 0;
2110
2111 if (tgtblk == 0)
2112 {
2113 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2114 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2115 preserve_temp_slots (tgtblk);
2116 }
2117
2118 /* This code assumes srcreg is at least a full word. If it isn't,
2119 copy it into a new pseudo which is a full word. */
2120 if (GET_MODE (srcreg) != BLKmode
2121 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2122 srcreg = convert_to_mode (word_mode, srcreg,
2123 TREE_UNSIGNED (type));
2124
2125 /* Structures whose size is not a multiple of a word are aligned
2126 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2127 machine, this means we must skip the empty high order bytes when
2128 calculating the bit offset. */
2129 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2130 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2131 * BITS_PER_UNIT));
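  /* For example, a 6-byte structure with 32-bit words yields a correction
     of 32 - (6 % 4) * 8 = 16 bits, so the copy below skips the 16 unused
     high-order bits of the first source word.  */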
2132
2133   /* Copy the structure BITSIZE bits at a time.
2134
2135 We could probably emit more efficient code for machines
2136 which do not use strict alignment, but it doesn't seem
2137 worth the effort at the current time. */
2138 for (bitpos = 0, xbitpos = big_endian_correction;
2139 bitpos < bytes * BITS_PER_UNIT;
2140 bitpos += bitsize, xbitpos += bitsize)
2141 {
2142
2143 /* We need a new source operand each time xbitpos is on a
2144 word boundary and when xbitpos == big_endian_correction
2145 (the first time through). */
2146 if (xbitpos % BITS_PER_WORD == 0
2147 || xbitpos == big_endian_correction)
2148 src = operand_subword_force (srcreg,
2149 xbitpos / BITS_PER_WORD,
2150 BLKmode);
2151
2152 /* We need a new destination operand each time bitpos is on
2153 a word boundary. */
2154 if (bitpos % BITS_PER_WORD == 0)
2155 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2156
2157       /* Use xbitpos for the source extraction (right justified) and
2158 	 bitpos for the destination store (left justified).  */
2159 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2160 extract_bit_field (src, bitsize,
2161 xbitpos % BITS_PER_WORD, 1,
2162 NULL_RTX, word_mode,
2163 word_mode,
2164 bitsize / BITS_PER_UNIT,
2165 BITS_PER_WORD),
2166 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2167 }
2168 return tgtblk;
2169 }
2170
2171
2172 /* Add a USE expression for REG to the (possibly empty) list pointed
2173 to by CALL_FUSAGE. REG must denote a hard register. */
2174
2175 void
2176 use_reg (call_fusage, reg)
2177 rtx *call_fusage, reg;
2178 {
2179 if (GET_CODE (reg) != REG
2180 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2181 abort();
2182
2183 *call_fusage
2184 = gen_rtx_EXPR_LIST (VOIDmode,
2185 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2186 }
2187
2188 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2189 starting at REGNO. All of these registers must be hard registers. */
2190
2191 void
2192 use_regs (call_fusage, regno, nregs)
2193 rtx *call_fusage;
2194 int regno;
2195 int nregs;
2196 {
2197 int i;
2198
2199 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2200 abort ();
2201
2202 for (i = 0; i < nregs; i++)
2203 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2204 }
2205
2206 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2207 PARALLEL REGS. This is for calls that pass values in multiple
2208 non-contiguous locations. The Irix 6 ABI has examples of this. */
2209
2210 void
2211 use_group_regs (call_fusage, regs)
2212 rtx *call_fusage;
2213 rtx regs;
2214 {
2215 int i;
2216
2217 for (i = 0; i < XVECLEN (regs, 0); i++)
2218 {
2219 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2220
2221 /* A NULL entry means the parameter goes both on the stack and in
2222 registers. This can also be a MEM for targets that pass values
2223 partially on the stack and partially in registers. */
2224 if (reg != 0 && GET_CODE (reg) == REG)
2225 use_reg (call_fusage, reg);
2226 }
2227 }
2228 \f
2229 /* Generate several move instructions to clear LEN bytes of block TO.
2230 (A MEM rtx with BLKmode). The caller must pass TO through
2231    protect_from_queue before calling.  ALIGN (in bytes) is the maximum alignment
2232 we can assume. */
2233
2234 static void
2235 clear_by_pieces (to, len, align)
2236 rtx to;
2237 int len, align;
2238 {
2239 struct clear_by_pieces data;
2240 rtx to_addr = XEXP (to, 0);
2241 int max_size = MOVE_MAX_PIECES + 1;
2242 enum machine_mode mode = VOIDmode, tmode;
2243 enum insn_code icode;
2244
2245 data.offset = 0;
2246 data.to_addr = to_addr;
2247 data.to = to;
2248 data.autinc_to
2249 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2250 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2251
2252 data.explicit_inc_to = 0;
2253 data.reverse
2254 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2255 if (data.reverse) data.offset = len;
2256 data.len = len;
2257
2258 data.to_struct = MEM_IN_STRUCT_P (to);
2259
2260   /* If clearing requires more than two move insns,
2261 copy addresses to registers (to make displacements shorter)
2262 and use post-increment if available. */
2263 if (!data.autinc_to
2264 && move_by_pieces_ninsns (len, align) > 2)
2265 {
2266 /* Determine the main mode we'll be using */
2267 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2268 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2269 if (GET_MODE_SIZE (tmode) < max_size)
2270 mode = tmode;
2271
2272 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2273 {
2274 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2275 data.autinc_to = 1;
2276 data.explicit_inc_to = -1;
2277 }
2278 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2279 {
2280 data.to_addr = copy_addr_to_reg (to_addr);
2281 data.autinc_to = 1;
2282 data.explicit_inc_to = 1;
2283 }
2284 if (!data.autinc_to && CONSTANT_P (to_addr))
2285 data.to_addr = copy_addr_to_reg (to_addr);
2286 }
2287
2288 if (! SLOW_UNALIGNED_ACCESS
2289 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2290 align = MOVE_MAX;
2291
2292 /* First move what we can in the largest integer mode, then go to
2293 successively smaller modes. */
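  /* For instance, clearing 7 bytes on a hypothetical 32-bit target would
     normally be done as one SImode store, one HImode store and one QImode
     store, provided MOVE_MAX_PIECES and the known alignment allow each of
     those modes.  */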
2294
2295 while (max_size > 1)
2296 {
2297 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2298 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2299 if (GET_MODE_SIZE (tmode) < max_size)
2300 mode = tmode;
2301
2302 if (mode == VOIDmode)
2303 break;
2304
2305 icode = mov_optab->handlers[(int) mode].insn_code;
2306 if (icode != CODE_FOR_nothing
2307 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2308 GET_MODE_SIZE (mode)))
2309 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2310
2311 max_size = GET_MODE_SIZE (mode);
2312 }
2313
2314 /* The code above should have handled everything. */
2315 if (data.len != 0)
2316 abort ();
2317 }
2318
2319 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2320 with move instructions for mode MODE. GENFUN is the gen_... function
2321 to make a move insn for that mode. DATA has all the other info. */
2322
2323 static void
2324 clear_by_pieces_1 (genfun, mode, data)
2325 rtx (*genfun) PROTO ((rtx, ...));
2326 enum machine_mode mode;
2327 struct clear_by_pieces *data;
2328 {
2329 register int size = GET_MODE_SIZE (mode);
2330 register rtx to1;
2331
2332 while (data->len >= size)
2333 {
2334 if (data->reverse) data->offset -= size;
2335
2336 to1 = (data->autinc_to
2337 ? gen_rtx_MEM (mode, data->to_addr)
2338 : copy_rtx (change_address (data->to, mode,
2339 plus_constant (data->to_addr,
2340 data->offset))));
2341 MEM_IN_STRUCT_P (to1) = data->to_struct;
2342
2343 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2344 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2345
2346 emit_insn ((*genfun) (to1, const0_rtx));
2347 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2348 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2349
2350 if (! data->reverse) data->offset += size;
2351
2352 data->len -= size;
2353 }
2354 }
2355 \f
2356 /* Write zeros through the storage of OBJECT.
2357 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2358    the maximum alignment we can assume it has, measured in bytes.
2359
2360 If we call a function that returns the length of the block, return it. */
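/* The strategy used below: when OBJECT is in BLKmode and SIZE is a small
   constant, clear it inline with clear_by_pieces; otherwise try the
   target's clrstrM patterns; failing that, emit a call to memset (or to
   bzero when TARGET_MEM_FUNCTIONS is not defined).  A non-BLKmode OBJECT
   is simply set to zero with a single move.  A typical call, with SIZE a
   CONST_INT and ALIGN in bytes, looks like

	clear_storage (target, GEN_INT (size), align);

   though SIZE may be any rtx giving the length in bytes.  */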
2361
2362 rtx
2363 clear_storage (object, size, align)
2364 rtx object;
2365 rtx size;
2366 int align;
2367 {
2368 #ifdef TARGET_MEM_FUNCTIONS
2369 static tree fn;
2370 tree call_expr, arg_list;
2371 #endif
2372 rtx retval = 0;
2373
2374 if (GET_MODE (object) == BLKmode)
2375 {
2376 object = protect_from_queue (object, 1);
2377 size = protect_from_queue (size, 0);
2378
2379 if (GET_CODE (size) == CONST_INT
2380 && MOVE_BY_PIECES_P (INTVAL (size), align))
2381 clear_by_pieces (object, INTVAL (size), align);
2382
2383 else
2384 {
2385 /* Try the most limited insn first, because there's no point
2386 including more than one in the machine description unless
2387 the more limited one has some advantage. */
2388
2389 rtx opalign = GEN_INT (align);
2390 enum machine_mode mode;
2391
2392 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2393 mode = GET_MODE_WIDER_MODE (mode))
2394 {
2395 enum insn_code code = clrstr_optab[(int) mode];
2396 insn_operand_predicate_fn pred;
2397
2398 if (code != CODE_FOR_nothing
2399 /* We don't need MODE to be narrower than
2400 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2401 the mode mask, as it is returned by the macro, it will
2402 definitely be less than the actual mode mask. */
2403 && ((GET_CODE (size) == CONST_INT
2404 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2405 <= (GET_MODE_MASK (mode) >> 1)))
2406 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2407 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2408 || (*pred) (object, BLKmode))
2409 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2410 || (*pred) (opalign, VOIDmode)))
2411 {
2412 rtx op1;
2413 rtx last = get_last_insn ();
2414 rtx pat;
2415
2416 op1 = convert_to_mode (mode, size, 1);
2417 pred = insn_data[(int) code].operand[1].predicate;
2418 if (pred != 0 && ! (*pred) (op1, mode))
2419 op1 = copy_to_mode_reg (mode, op1);
2420
2421 pat = GEN_FCN ((int) code) (object, op1, opalign);
2422 if (pat)
2423 {
2424 emit_insn (pat);
2425 return 0;
2426 }
2427 else
2428 delete_insns_since (last);
2429 }
2430 }
2431
2432 /* OBJECT or SIZE may have been passed through protect_from_queue.
2433
2434 It is unsafe to save the value generated by protect_from_queue
2435 and reuse it later. Consider what happens if emit_queue is
2436 called before the return value from protect_from_queue is used.
2437
2438 Expansion of the CALL_EXPR below will call emit_queue before
2439 we are finished emitting RTL for argument setup. So if we are
2440 not careful we could get the wrong value for an argument.
2441
2442 To avoid this problem we go ahead and emit code to copy OBJECT
2443 and SIZE into new pseudos. We can then place those new pseudos
2444 into an RTL_EXPR and use them later, even after a call to
2445 emit_queue.
2446
2447 Note this is not strictly needed for library calls since they
2448 do not call emit_queue before loading their arguments. However,
2449 we may need to have library calls call emit_queue in the future
2450 since failing to do so could cause problems for targets which
2451 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2452 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2453
2454 #ifdef TARGET_MEM_FUNCTIONS
2455 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2456 #else
2457 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2458 TREE_UNSIGNED (integer_type_node));
2459 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2460 #endif
2461
2462
2463 #ifdef TARGET_MEM_FUNCTIONS
2464 /* It is incorrect to use the libcall calling conventions to call
2465 memset in this context.
2466
2467 This could be a user call to memset and the user may wish to
2468 examine the return value from memset.
2469
2470 For targets where libcalls and normal calls have different
2471 conventions for returning pointers, we could end up generating
2472 incorrect code.
2473
2474 So instead of using a libcall sequence we build up a suitable
2475 CALL_EXPR and expand the call in the normal fashion. */
2476 if (fn == NULL_TREE)
2477 {
2478 tree fntype;
2479
2480 	  /* This was copied from except.c; I don't know whether all of this
2481 	     is necessary in this context.  */
2482 fn = get_identifier ("memset");
2483 push_obstacks_nochange ();
2484 end_temporary_allocation ();
2485 fntype = build_pointer_type (void_type_node);
2486 fntype = build_function_type (fntype, NULL_TREE);
2487 fn = build_decl (FUNCTION_DECL, fn, fntype);
2488 ggc_add_tree_root (&fn, 1);
2489 DECL_EXTERNAL (fn) = 1;
2490 TREE_PUBLIC (fn) = 1;
2491 DECL_ARTIFICIAL (fn) = 1;
2492 make_decl_rtl (fn, NULL_PTR, 1);
2493 assemble_external (fn);
2494 pop_obstacks ();
2495 }
2496
2497 /* We need to make an argument list for the function call.
2498
2499 	 memset has three arguments: the first is a void * address, the
2500 	 second an integer with the initialization value, and the last a
2501 	 size_t byte count for the copy.  */
2502 arg_list
2503 = build_tree_list (NULL_TREE,
2504 make_tree (build_pointer_type (void_type_node),
2505 object));
2506 TREE_CHAIN (arg_list)
2507 = build_tree_list (NULL_TREE,
2508 make_tree (integer_type_node, const0_rtx));
2509 TREE_CHAIN (TREE_CHAIN (arg_list))
2510 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2511 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2512
2513 /* Now we have to build up the CALL_EXPR itself. */
2514 call_expr = build1 (ADDR_EXPR,
2515 build_pointer_type (TREE_TYPE (fn)), fn);
2516 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2517 call_expr, arg_list, NULL_TREE);
2518 TREE_SIDE_EFFECTS (call_expr) = 1;
2519
2520 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2521 #else
2522 emit_library_call (bzero_libfunc, 0,
2523 VOIDmode, 2, object, Pmode, size,
2524 TYPE_MODE (integer_type_node));
2525 #endif
2526 }
2527 }
2528 else
2529 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2530
2531 return retval;
2532 }
2533
2534 /* Generate code to copy Y into X.
2535 Both Y and X must have the same mode, except that
2536 Y can be a constant with VOIDmode.
2537 This mode cannot be BLKmode; use emit_block_move for that.
2538
2539 Return the last instruction emitted. */
2540
2541 rtx
2542 emit_move_insn (x, y)
2543 rtx x, y;
2544 {
2545 enum machine_mode mode = GET_MODE (x);
2546
2547 x = protect_from_queue (x, 1);
2548 y = protect_from_queue (y, 0);
2549
2550 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2551 abort ();
2552
2553 /* Never force constant_p_rtx to memory. */
2554 if (GET_CODE (y) == CONSTANT_P_RTX)
2555 ;
2556 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2557 y = force_const_mem (mode, y);
2558
2559 /* If X or Y are memory references, verify that their addresses are valid
2560 for the machine. */
2561 if (GET_CODE (x) == MEM
2562 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2563 && ! push_operand (x, GET_MODE (x)))
2564 || (flag_force_addr
2565 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2566 x = change_address (x, VOIDmode, XEXP (x, 0));
2567
2568 if (GET_CODE (y) == MEM
2569 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2570 || (flag_force_addr
2571 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2572 y = change_address (y, VOIDmode, XEXP (y, 0));
2573
2574 if (mode == BLKmode)
2575 abort ();
2576
2577 return emit_move_insn_1 (x, y);
2578 }
2579
2580 /* Low level part of emit_move_insn.
2581 Called just like emit_move_insn, but assumes X and Y
2582 are basically valid. */
2583
2584 rtx
2585 emit_move_insn_1 (x, y)
2586 rtx x, y;
2587 {
2588 enum machine_mode mode = GET_MODE (x);
2589 enum machine_mode submode;
2590 enum mode_class class = GET_MODE_CLASS (mode);
2591 int i;
2592
2593 if (mode >= MAX_MACHINE_MODE)
2594 abort ();
2595
2596 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2597 return
2598 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2599
2600 /* Expand complex moves by moving real part and imag part, if possible. */
2601 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2602 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2603 * BITS_PER_UNIT),
2604 (class == MODE_COMPLEX_INT
2605 ? MODE_INT : MODE_FLOAT),
2606 0))
2607 && (mov_optab->handlers[(int) submode].insn_code
2608 != CODE_FOR_nothing))
2609 {
2610 /* Don't split destination if it is a stack push. */
2611 int stack = push_operand (x, GET_MODE (x));
2612
2613       /* If this is a stack push, push the highpart first, so it
2614 will be in the argument order.
2615
2616 In that case, change_address is used only to convert
2617 the mode, not to change the address. */
2618 if (stack)
2619 {
2620 /* Note that the real part always precedes the imag part in memory
2621 	     regardless of the machine's endianness.  */
2622 #ifdef STACK_GROWS_DOWNWARD
2623 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2624 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2625 gen_imagpart (submode, y)));
2626 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2627 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2628 gen_realpart (submode, y)));
2629 #else
2630 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2631 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2632 gen_realpart (submode, y)));
2633 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2634 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2635 gen_imagpart (submode, y)));
2636 #endif
2637 }
2638 else
2639 {
2640 rtx realpart_x, realpart_y;
2641 rtx imagpart_x, imagpart_y;
2642
2643 /* If this is a complex value with each part being smaller than a
2644 word, the usual calling sequence will likely pack the pieces into
2645 a single register. Unfortunately, SUBREG of hard registers only
2646 deals in terms of words, so we have a problem converting input
2647 arguments to the CONCAT of two registers that is used elsewhere
2648 for complex values. If this is before reload, we can copy it into
2649 memory and reload. FIXME, we should see about using extract and
2650 insert on integer registers, but complex short and complex char
2651 variables should be rarely used. */
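	  /* For instance, on a 32-bit target where short is 16 bits, a
	     complex short value is only 32 bits wide, so reg_mode below
	     becomes SImode: the value is passed through an SImode stack
	     temporary, accessed via cmem in the complex mode on one side
	     of the copy.  */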
2652 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2653 && (reload_in_progress | reload_completed) == 0)
2654 {
2655 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2656 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2657
2658 if (packed_dest_p || packed_src_p)
2659 {
2660 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2661 ? MODE_FLOAT : MODE_INT);
2662
2663 enum machine_mode reg_mode =
2664 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2665
2666 if (reg_mode != BLKmode)
2667 {
2668 rtx mem = assign_stack_temp (reg_mode,
2669 GET_MODE_SIZE (mode), 0);
2670
2671 rtx cmem = change_address (mem, mode, NULL_RTX);
2672
2673 cfun->cannot_inline = "function uses short complex types";
2674
2675 if (packed_dest_p)
2676 {
2677 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2678 emit_move_insn_1 (cmem, y);
2679 return emit_move_insn_1 (sreg, mem);
2680 }
2681 else
2682 {
2683 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2684 emit_move_insn_1 (mem, sreg);
2685 return emit_move_insn_1 (x, cmem);
2686 }
2687 }
2688 }
2689 }
2690
2691 realpart_x = gen_realpart (submode, x);
2692 realpart_y = gen_realpart (submode, y);
2693 imagpart_x = gen_imagpart (submode, x);
2694 imagpart_y = gen_imagpart (submode, y);
2695
2696 /* Show the output dies here. This is necessary for SUBREGs
2697 of pseudos since we cannot track their lifetimes correctly;
2698 hard regs shouldn't appear here except as return values.
2699 We never want to emit such a clobber after reload. */
2700 if (x != y
2701 && ! (reload_in_progress || reload_completed)
2702 && (GET_CODE (realpart_x) == SUBREG
2703 || GET_CODE (imagpart_x) == SUBREG))
2704 {
2705 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2706 }
2707
2708 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2709 (realpart_x, realpart_y));
2710 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2711 (imagpart_x, imagpart_y));
2712 }
2713
2714 return get_last_insn ();
2715 }
2716
2717 /* This will handle any multi-word mode that lacks a move_insn pattern.
2718 However, you will get better code if you define such patterns,
2719 even if they must turn into multiple assembler instructions. */
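  /* For instance, a DImode move on a 32-bit target with no movdi pattern
     is emitted here as two word-sized moves, one for each
     operand_subword.  */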
2720 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2721 {
2722 rtx last_insn = 0;
2723 rtx seq;
2724 int need_clobber;
2725
2726 #ifdef PUSH_ROUNDING
2727
2728 /* If X is a push on the stack, do the push now and replace
2729 X with a reference to the stack pointer. */
2730 if (push_operand (x, GET_MODE (x)))
2731 {
2732 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2733 x = change_address (x, VOIDmode, stack_pointer_rtx);
2734 }
2735 #endif
2736
2737 start_sequence ();
2738
2739 need_clobber = 0;
2740 for (i = 0;
2741 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2742 i++)
2743 {
2744 rtx xpart = operand_subword (x, i, 1, mode);
2745 rtx ypart = operand_subword (y, i, 1, mode);
2746
2747 /* If we can't get a part of Y, put Y into memory if it is a
2748 constant. Otherwise, force it into a register. If we still
2749 can't get a part of Y, abort. */
2750 if (ypart == 0 && CONSTANT_P (y))
2751 {
2752 y = force_const_mem (mode, y);
2753 ypart = operand_subword (y, i, 1, mode);
2754 }
2755 else if (ypart == 0)
2756 ypart = operand_subword_force (y, i, mode);
2757
2758 if (xpart == 0 || ypart == 0)
2759 abort ();
2760
2761 need_clobber |= (GET_CODE (xpart) == SUBREG);
2762
2763 last_insn = emit_move_insn (xpart, ypart);
2764 }
2765
2766 seq = gen_sequence ();
2767 end_sequence ();
2768
2769 /* Show the output dies here. This is necessary for SUBREGs
2770 of pseudos since we cannot track their lifetimes correctly;
2771 hard regs shouldn't appear here except as return values.
2772 We never want to emit such a clobber after reload. */
2773 if (x != y
2774 && ! (reload_in_progress || reload_completed)
2775 && need_clobber != 0)
2776 {
2777 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2778 }
2779
2780 emit_insn (seq);
2781
2782 return last_insn;
2783 }
2784 else
2785 abort ();
2786 }
2787 \f
2788 /* Pushing data onto the stack. */
2789
2790 /* Push a block of length SIZE (perhaps variable)
2791 and return an rtx to address the beginning of the block.
2792 Note that it is not possible for the value returned to be a QUEUED.
2793 The value may be virtual_outgoing_args_rtx.
2794
2795 EXTRA is the number of bytes of padding to push in addition to SIZE.
2796 BELOW nonzero means this padding comes at low addresses;
2797 otherwise, the padding comes at high addresses. */
2798
2799 rtx
2800 push_block (size, extra, below)
2801 rtx size;
2802 int extra, below;
2803 {
2804 register rtx temp;
2805
2806 size = convert_modes (Pmode, ptr_mode, size, 1);
2807 if (CONSTANT_P (size))
2808 anti_adjust_stack (plus_constant (size, extra));
2809 else if (GET_CODE (size) == REG && extra == 0)
2810 anti_adjust_stack (size);
2811 else
2812 {
2813 rtx temp = copy_to_mode_reg (Pmode, size);
2814 if (extra != 0)
2815 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2816 temp, 0, OPTAB_LIB_WIDEN);
2817 anti_adjust_stack (temp);
2818 }
2819
2820 #if defined (STACK_GROWS_DOWNWARD) \
2821 || (defined (ARGS_GROW_DOWNWARD) \
2822 && !defined (ACCUMULATE_OUTGOING_ARGS))
2823
2824 /* Return the lowest stack address when STACK or ARGS grow downward and
2825      we are not accumulating outgoing arguments (the c4x port uses such
2826 conventions). */
2827 temp = virtual_outgoing_args_rtx;
2828 if (extra != 0 && below)
2829 temp = plus_constant (temp, extra);
2830 #else
2831 if (GET_CODE (size) == CONST_INT)
2832 temp = plus_constant (virtual_outgoing_args_rtx,
2833 - INTVAL (size) - (below ? 0 : extra));
2834 else if (extra != 0 && !below)
2835 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2836 negate_rtx (Pmode, plus_constant (size, extra)));
2837 else
2838 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2839 negate_rtx (Pmode, size));
2840 #endif
2841
2842 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2843 }
2844
2845 rtx
2846 gen_push_operand ()
2847 {
2848 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2849 }
2850
2851 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2852 block of SIZE bytes. */
2853
2854 static rtx
2855 get_push_address (size)
2856 int size;
2857 {
2858 register rtx temp;
2859
2860 if (STACK_PUSH_CODE == POST_DEC)
2861 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2862 else if (STACK_PUSH_CODE == POST_INC)
2863 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2864 else
2865 temp = stack_pointer_rtx;
2866
2867 return copy_to_reg (temp);
2868 }
2869
2870 /* Generate code to push X onto the stack, assuming it has mode MODE and
2871 type TYPE.
2872 MODE is redundant except when X is a CONST_INT (since they don't
2873 carry mode info).
2874 SIZE is an rtx for the size of data to be copied (in bytes),
2875 needed only if X is BLKmode.
2876
2877 ALIGN (in bytes) is maximum alignment we can assume.
2878
2879 If PARTIAL and REG are both nonzero, then copy that many of the first
2880 words of X into registers starting with REG, and push the rest of X.
2881 The amount of space pushed is decreased by PARTIAL words,
2882 rounded *down* to a multiple of PARM_BOUNDARY.
2883 REG must be a hard register in this case.
2884    If REG is zero but PARTIAL is not, take all other actions for an
2885 argument partially in registers, but do not actually load any
2886 registers.
2887
2888 EXTRA is the amount in bytes of extra space to leave next to this arg.
2889 This is ignored if an argument block has already been allocated.
2890
2891 On a machine that lacks real push insns, ARGS_ADDR is the address of
2892 the bottom of the argument block for this call. We use indexing off there
2893 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2894    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2895
2896 ARGS_SO_FAR is the size of args previously pushed for this call.
2897
2898 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2899 for arguments passed in registers. If nonzero, it will be the number
2900 of bytes required. */
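/* For example, with 32-bit words and PARTIAL == 2, the first 8 bytes of X
   are copied into REG and the following hard register, and only the
   remainder of X is pushed (or stored at ARGS_ADDR).  */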
2901
2902 void
2903 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2904 args_addr, args_so_far, reg_parm_stack_space,
2905 alignment_pad)
2906 register rtx x;
2907 enum machine_mode mode;
2908 tree type;
2909 rtx size;
2910 int align;
2911 int partial;
2912 rtx reg;
2913 int extra;
2914 rtx args_addr;
2915 rtx args_so_far;
2916 int reg_parm_stack_space;
2917 rtx alignment_pad;
2918 {
2919 rtx xinner;
2920 enum direction stack_direction
2921 #ifdef STACK_GROWS_DOWNWARD
2922 = downward;
2923 #else
2924 = upward;
2925 #endif
2926
2927 /* Decide where to pad the argument: `downward' for below,
2928 `upward' for above, or `none' for don't pad it.
2929 Default is below for small data on big-endian machines; else above. */
2930 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2931
2932 /* Invert direction if stack is post-update. */
2933 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2934 if (where_pad != none)
2935 where_pad = (where_pad == downward ? upward : downward);
2936
2937 xinner = x = protect_from_queue (x, 0);
2938
2939 if (mode == BLKmode)
2940 {
2941 /* Copy a block into the stack, entirely or partially. */
2942
2943 register rtx temp;
2944 int used = partial * UNITS_PER_WORD;
2945 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2946 int skip;
2947
2948 if (size == 0)
2949 abort ();
2950
2951 used -= offset;
2952
2953 /* USED is now the # of bytes we need not copy to the stack
2954 because registers will take care of them. */
2955
2956 if (partial != 0)
2957 xinner = change_address (xinner, BLKmode,
2958 plus_constant (XEXP (xinner, 0), used));
2959
2960 /* If the partial register-part of the arg counts in its stack size,
2961 skip the part of stack space corresponding to the registers.
2962 Otherwise, start copying to the beginning of the stack space,
2963 by setting SKIP to 0. */
2964 skip = (reg_parm_stack_space == 0) ? 0 : used;
2965
2966 #ifdef PUSH_ROUNDING
2967 /* Do it with several push insns if that doesn't take lots of insns
2968 and if there is no difficulty with push insns that skip bytes
2969 on the stack for alignment purposes. */
2970 if (args_addr == 0
2971 && GET_CODE (size) == CONST_INT
2972 && skip == 0
2973 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2974 /* Here we avoid the case of a structure whose weak alignment
2975 forces many pushes of a small amount of data,
2976 and such small pushes do rounding that causes trouble. */
2977 && ((! SLOW_UNALIGNED_ACCESS)
2978 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2979 || PUSH_ROUNDING (align) == align)
2980 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2981 {
2982 /* Push padding now if padding above and stack grows down,
2983 or if padding below and stack grows up.
2984 But if space already allocated, this has already been done. */
2985 if (extra && args_addr == 0
2986 && where_pad != none && where_pad != stack_direction)
2987 anti_adjust_stack (GEN_INT (extra));
2988
2989 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2990 INTVAL (size) - used, align);
2991
2992 if (current_function_check_memory_usage && ! in_check_memory_usage)
2993 {
2994 rtx temp;
2995
2996 in_check_memory_usage = 1;
2997 temp = get_push_address (INTVAL(size) - used);
2998 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2999 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3000 temp, Pmode,
3001 XEXP (xinner, 0), Pmode,
3002 GEN_INT (INTVAL(size) - used),
3003 TYPE_MODE (sizetype));
3004 else
3005 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3006 temp, Pmode,
3007 GEN_INT (INTVAL(size) - used),
3008 TYPE_MODE (sizetype),
3009 GEN_INT (MEMORY_USE_RW),
3010 TYPE_MODE (integer_type_node));
3011 in_check_memory_usage = 0;
3012 }
3013 }
3014 else
3015 #endif /* PUSH_ROUNDING */
3016 {
3017 /* Otherwise make space on the stack and copy the data
3018 to the address of that space. */
3019
3020 /* Deduct words put into registers from the size we must copy. */
3021 if (partial != 0)
3022 {
3023 if (GET_CODE (size) == CONST_INT)
3024 size = GEN_INT (INTVAL (size) - used);
3025 else
3026 size = expand_binop (GET_MODE (size), sub_optab, size,
3027 GEN_INT (used), NULL_RTX, 0,
3028 OPTAB_LIB_WIDEN);
3029 }
3030
3031 /* Get the address of the stack space.
3032 In this case, we do not deal with EXTRA separately.
3033 A single stack adjust will do. */
3034 if (! args_addr)
3035 {
3036 temp = push_block (size, extra, where_pad == downward);
3037 extra = 0;
3038 }
3039 else if (GET_CODE (args_so_far) == CONST_INT)
3040 temp = memory_address (BLKmode,
3041 plus_constant (args_addr,
3042 skip + INTVAL (args_so_far)));
3043 else
3044 temp = memory_address (BLKmode,
3045 plus_constant (gen_rtx_PLUS (Pmode,
3046 args_addr,
3047 args_so_far),
3048 skip));
3049 if (current_function_check_memory_usage && ! in_check_memory_usage)
3050 {
3051 rtx target;
3052
3053 in_check_memory_usage = 1;
3054 target = copy_to_reg (temp);
3055 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3056 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3057 target, Pmode,
3058 XEXP (xinner, 0), Pmode,
3059 size, TYPE_MODE (sizetype));
3060 else
3061 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3062 target, Pmode,
3063 size, TYPE_MODE (sizetype),
3064 GEN_INT (MEMORY_USE_RW),
3065 TYPE_MODE (integer_type_node));
3066 in_check_memory_usage = 0;
3067 }
3068
3069 /* TEMP is the address of the block. Copy the data there. */
3070 if (GET_CODE (size) == CONST_INT
3071 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
3072 {
3073 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3074 INTVAL (size), align);
3075 goto ret;
3076 }
3077 else
3078 {
3079 rtx opalign = GEN_INT (align);
3080 enum machine_mode mode;
3081 rtx target = gen_rtx_MEM (BLKmode, temp);
3082
3083 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3084 mode != VOIDmode;
3085 mode = GET_MODE_WIDER_MODE (mode))
3086 {
3087 enum insn_code code = movstr_optab[(int) mode];
3088 insn_operand_predicate_fn pred;
3089
3090 if (code != CODE_FOR_nothing
3091 && ((GET_CODE (size) == CONST_INT
3092 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3093 <= (GET_MODE_MASK (mode) >> 1)))
3094 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3095 && (!(pred = insn_data[(int) code].operand[0].predicate)
3096 || ((*pred) (target, BLKmode)))
3097 && (!(pred = insn_data[(int) code].operand[1].predicate)
3098 || ((*pred) (xinner, BLKmode)))
3099 && (!(pred = insn_data[(int) code].operand[3].predicate)
3100 || ((*pred) (opalign, VOIDmode))))
3101 {
3102 rtx op2 = convert_to_mode (mode, size, 1);
3103 rtx last = get_last_insn ();
3104 rtx pat;
3105
3106 pred = insn_data[(int) code].operand[2].predicate;
3107 if (pred != 0 && ! (*pred) (op2, mode))
3108 op2 = copy_to_mode_reg (mode, op2);
3109
3110 pat = GEN_FCN ((int) code) (target, xinner,
3111 op2, opalign);
3112 if (pat)
3113 {
3114 emit_insn (pat);
3115 goto ret;
3116 }
3117 else
3118 delete_insns_since (last);
3119 }
3120 }
3121 }
3122
3123 #ifndef ACCUMULATE_OUTGOING_ARGS
3124 /* If the source is referenced relative to the stack pointer,
3125 copy it to another register to stabilize it. We do not need
3126 to do this if we know that we won't be changing sp. */
3127
3128 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3129 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3130 temp = copy_to_reg (temp);
3131 #endif
3132
3133 /* Make inhibit_defer_pop nonzero around the library call
3134 to force it to pop the bcopy-arguments right away. */
3135 NO_DEFER_POP;
3136 #ifdef TARGET_MEM_FUNCTIONS
3137 emit_library_call (memcpy_libfunc, 0,
3138 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3139 convert_to_mode (TYPE_MODE (sizetype),
3140 size, TREE_UNSIGNED (sizetype)),
3141 TYPE_MODE (sizetype));
3142 #else
3143 emit_library_call (bcopy_libfunc, 0,
3144 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3145 convert_to_mode (TYPE_MODE (integer_type_node),
3146 size,
3147 TREE_UNSIGNED (integer_type_node)),
3148 TYPE_MODE (integer_type_node));
3149 #endif
3150 OK_DEFER_POP;
3151 }
3152 }
3153 else if (partial > 0)
3154 {
3155 /* Scalar partly in registers. */
3156
3157 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3158 int i;
3159 int not_stack;
3160 /* # words of start of argument
3161 that we must make space for but need not store. */
3162 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3163 int args_offset = INTVAL (args_so_far);
3164 int skip;
3165
3166 /* Push padding now if padding above and stack grows down,
3167 or if padding below and stack grows up.
3168 But if space already allocated, this has already been done. */
3169 if (extra && args_addr == 0
3170 && where_pad != none && where_pad != stack_direction)
3171 anti_adjust_stack (GEN_INT (extra));
3172
3173 /* If we make space by pushing it, we might as well push
3174 the real data. Otherwise, we can leave OFFSET nonzero
3175 and leave the space uninitialized. */
3176 if (args_addr == 0)
3177 offset = 0;
3178
3179 /* Now NOT_STACK gets the number of words that we don't need to
3180 allocate on the stack. */
3181 not_stack = partial - offset;
3182
3183 /* If the partial register-part of the arg counts in its stack size,
3184 skip the part of stack space corresponding to the registers.
3185 Otherwise, start copying to the beginning of the stack space,
3186 by setting SKIP to 0. */
3187 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3188
3189 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3190 x = validize_mem (force_const_mem (mode, x));
3191
3192 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3193 SUBREGs of such registers are not allowed. */
3194 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3195 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3196 x = copy_to_reg (x);
3197
3198 /* Loop over all the words allocated on the stack for this arg. */
3199 /* We can do it by words, because any scalar bigger than a word
3200 	 has a size that is a multiple of a word.  */
3201 #ifndef PUSH_ARGS_REVERSED
3202 for (i = not_stack; i < size; i++)
3203 #else
3204 for (i = size - 1; i >= not_stack; i--)
3205 #endif
3206 if (i >= not_stack + offset)
3207 emit_push_insn (operand_subword_force (x, i, mode),
3208 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3209 0, args_addr,
3210 GEN_INT (args_offset + ((i - not_stack + skip)
3211 * UNITS_PER_WORD)),
3212 reg_parm_stack_space, alignment_pad);
3213 }
3214 else
3215 {
3216 rtx addr;
3217 rtx target = NULL_RTX;
3218
3219 /* Push padding now if padding above and stack grows down,
3220 or if padding below and stack grows up.
3221 But if space already allocated, this has already been done. */
3222 if (extra && args_addr == 0
3223 && where_pad != none && where_pad != stack_direction)
3224 anti_adjust_stack (GEN_INT (extra));
3225
3226 #ifdef PUSH_ROUNDING
3227 if (args_addr == 0)
3228 addr = gen_push_operand ();
3229 else
3230 #endif
3231 {
3232 if (GET_CODE (args_so_far) == CONST_INT)
3233 addr
3234 = memory_address (mode,
3235 plus_constant (args_addr,
3236 INTVAL (args_so_far)));
3237 else
3238 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3239 args_so_far));
3240 target = addr;
3241 }
3242
3243 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3244
3245 if (current_function_check_memory_usage && ! in_check_memory_usage)
3246 {
3247 in_check_memory_usage = 1;
3248 if (target == 0)
3249 target = get_push_address (GET_MODE_SIZE (mode));
3250
3251 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3252 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3253 target, Pmode,
3254 XEXP (x, 0), Pmode,
3255 GEN_INT (GET_MODE_SIZE (mode)),
3256 TYPE_MODE (sizetype));
3257 else
3258 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3259 target, Pmode,
3260 GEN_INT (GET_MODE_SIZE (mode)),
3261 TYPE_MODE (sizetype),
3262 GEN_INT (MEMORY_USE_RW),
3263 TYPE_MODE (integer_type_node));
3264 in_check_memory_usage = 0;
3265 }
3266 }
3267
3268 ret:
3269 /* If part should go in registers, copy that part
3270 into the appropriate registers. Do this now, at the end,
3271 since mem-to-mem copies above may do function calls. */
3272 if (partial > 0 && reg != 0)
3273 {
3274 /* Handle calls that pass values in multiple non-contiguous locations.
3275 The Irix 6 ABI has examples of this. */
3276 if (GET_CODE (reg) == PARALLEL)
3277 emit_group_load (reg, x, -1, align); /* ??? size? */
3278 else
3279 move_block_to_reg (REGNO (reg), x, partial, mode);
3280 }
3281
3282 if (extra && args_addr == 0 && where_pad == stack_direction)
3283 anti_adjust_stack (GEN_INT (extra));
3284
3285 if (alignment_pad)
3286 anti_adjust_stack (alignment_pad);
3287 }
3288 \f
3289 /* Expand an assignment that stores the value of FROM into TO.
3290 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3291 (This may contain a QUEUED rtx;
3292 if the value is constant, this rtx is a constant.)
3293 Otherwise, the returned value is NULL_RTX.
3294
3295 SUGGEST_REG is no longer actually used.
3296 It used to mean, copy the value through a register
3297 and return that register, if that is possible.
3298 We now use WANT_VALUE to decide whether to do this. */
3299
3300 rtx
3301 expand_assignment (to, from, want_value, suggest_reg)
3302 tree to, from;
3303 int want_value;
3304 int suggest_reg ATTRIBUTE_UNUSED;
3305 {
3306 register rtx to_rtx = 0;
3307 rtx result;
3308
3309 /* Don't crash if the lhs of the assignment was erroneous. */
3310
3311 if (TREE_CODE (to) == ERROR_MARK)
3312 {
3313 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3314 return want_value ? result : NULL_RTX;
3315 }
3316
3317 /* Assignment of a structure component needs special treatment
3318 if the structure component's rtx is not simply a MEM.
3319 Assignment of an array element at a constant index, and assignment of
3320      an array element in an unaligned packed structure field, have the same
3321 problem. */
3322
3323 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3324 || TREE_CODE (to) == ARRAY_REF)
3325 {
3326 enum machine_mode mode1;
3327 int bitsize;
3328 int bitpos;
3329 tree offset;
3330 int unsignedp;
3331 int volatilep = 0;
3332 tree tem;
3333 int alignment;
3334
3335 push_temp_slots ();
3336 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3337 &unsignedp, &volatilep, &alignment);
3338
3339 /* If we are going to use store_bit_field and extract_bit_field,
3340 make sure to_rtx will be safe for multiple use. */
3341
3342 if (mode1 == VOIDmode && want_value)
3343 tem = stabilize_reference (tem);
3344
3345 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3346 if (offset != 0)
3347 {
3348 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3349
3350 if (GET_CODE (to_rtx) != MEM)
3351 abort ();
3352
3353 if (GET_MODE (offset_rtx) != ptr_mode)
3354 {
3355 #ifdef POINTERS_EXTEND_UNSIGNED
3356 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3357 #else
3358 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3359 #endif
3360 }
3361
3362 	  /* A constant address in TO_RTX can have VOIDmode; we must not try
3363 to call force_reg for that case. Avoid that case. */
3364 if (GET_CODE (to_rtx) == MEM
3365 && GET_MODE (to_rtx) == BLKmode
3366 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3367 && bitsize
3368 && (bitpos % bitsize) == 0
3369 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3370 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3371 {
3372 rtx temp = change_address (to_rtx, mode1,
3373 plus_constant (XEXP (to_rtx, 0),
3374 (bitpos /
3375 BITS_PER_UNIT)));
3376 if (GET_CODE (XEXP (temp, 0)) == REG)
3377 to_rtx = temp;
3378 else
3379 to_rtx = change_address (to_rtx, mode1,
3380 force_reg (GET_MODE (XEXP (temp, 0)),
3381 XEXP (temp, 0)));
3382 bitpos = 0;
3383 }
3384
3385 to_rtx = change_address (to_rtx, VOIDmode,
3386 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3387 force_reg (ptr_mode,
3388 offset_rtx)));
3389 }
3390
3391 if (volatilep)
3392 {
3393 if (GET_CODE (to_rtx) == MEM)
3394 {
3395 /* When the offset is zero, to_rtx is the address of the
3396 structure we are storing into, and hence may be shared.
3397 We must make a new MEM before setting the volatile bit. */
3398 if (offset == 0)
3399 to_rtx = copy_rtx (to_rtx);
3400
3401 MEM_VOLATILE_P (to_rtx) = 1;
3402 }
3403 #if 0 /* This was turned off because, when a field is volatile
3404 in an object which is not volatile, the object may be in a register,
3405 and then we would abort over here. */
3406 else
3407 abort ();
3408 #endif
3409 }
3410
3411 if (TREE_CODE (to) == COMPONENT_REF
3412 && TREE_READONLY (TREE_OPERAND (to, 1)))
3413 {
3414 if (offset == 0)
3415 to_rtx = copy_rtx (to_rtx);
3416
3417 RTX_UNCHANGING_P (to_rtx) = 1;
3418 }
3419
3420 /* Check the access. */
3421 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3422 {
3423 rtx to_addr;
3424 int size;
3425 int best_mode_size;
3426 enum machine_mode best_mode;
3427
3428 best_mode = get_best_mode (bitsize, bitpos,
3429 TYPE_ALIGN (TREE_TYPE (tem)),
3430 mode1, volatilep);
3431 if (best_mode == VOIDmode)
3432 best_mode = QImode;
3433
3434 best_mode_size = GET_MODE_BITSIZE (best_mode);
3435 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3436 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3437 size *= GET_MODE_SIZE (best_mode);
3438
3439 /* Check the access right of the pointer. */
3440 if (size)
3441 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3442 to_addr, Pmode,
3443 GEN_INT (size), TYPE_MODE (sizetype),
3444 GEN_INT (MEMORY_USE_WO),
3445 TYPE_MODE (integer_type_node));
3446 }
3447
3448 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3449 (want_value
3450 /* Spurious cast makes HPUX compiler happy. */
3451 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3452 : VOIDmode),
3453 unsignedp,
3454 /* Required alignment of containing datum. */
3455 alignment,
3456 int_size_in_bytes (TREE_TYPE (tem)),
3457 get_alias_set (to));
3458 preserve_temp_slots (result);
3459 free_temp_slots ();
3460 pop_temp_slots ();
3461
3462 /* If the value is meaningful, convert RESULT to the proper mode.
3463 Otherwise, return nothing. */
3464 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3465 TYPE_MODE (TREE_TYPE (from)),
3466 result,
3467 TREE_UNSIGNED (TREE_TYPE (to)))
3468 : NULL_RTX);
3469 }
3470
3471 /* If the rhs is a function call and its value is not an aggregate,
3472 call the function before we start to compute the lhs.
3473 This is needed for correct code for cases such as
3474 val = setjmp (buf) on machines where reference to val
3475 requires loading up part of an address in a separate insn.
3476
3477 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3478 a promoted variable where the zero- or sign- extension needs to be done.
3479 Handling this in the normal way is safe because no computation is done
3480 before the call. */
3481 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3482 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3483 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3484 {
3485 rtx value;
3486
3487 push_temp_slots ();
3488 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3489 if (to_rtx == 0)
3490 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3491
3492 /* Handle calls that return values in multiple non-contiguous locations.
3493 The Irix 6 ABI has examples of this. */
3494 if (GET_CODE (to_rtx) == PARALLEL)
3495 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3496 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3497 else if (GET_MODE (to_rtx) == BLKmode)
3498 emit_block_move (to_rtx, value, expr_size (from),
3499 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3500 else
3501 {
3502 #ifdef POINTERS_EXTEND_UNSIGNED
3503 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3504 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3505 value = convert_memory_address (GET_MODE (to_rtx), value);
3506 #endif
3507 emit_move_insn (to_rtx, value);
3508 }
3509 preserve_temp_slots (to_rtx);
3510 free_temp_slots ();
3511 pop_temp_slots ();
3512 return want_value ? to_rtx : NULL_RTX;
3513 }
3514
3515 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3516 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3517
3518 if (to_rtx == 0)
3519 {
3520 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3521 if (GET_CODE (to_rtx) == MEM)
3522 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3523 }
3524
3525 /* Don't move directly into a return register. */
3526 if (TREE_CODE (to) == RESULT_DECL
3527 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3528 {
3529 rtx temp;
3530
3531 push_temp_slots ();
3532 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3533
3534 if (GET_CODE (to_rtx) == PARALLEL)
3535 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3536 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3537 else
3538 emit_move_insn (to_rtx, temp);
3539
3540 preserve_temp_slots (to_rtx);
3541 free_temp_slots ();
3542 pop_temp_slots ();
3543 return want_value ? to_rtx : NULL_RTX;
3544 }
3545
3546 /* In case we are returning the contents of an object which overlaps
3547 the place the value is being stored, use a safe function when copying
3548 a value through a pointer into a structure value return block. */
3549 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3550 && current_function_returns_struct
3551 && !current_function_returns_pcc_struct)
3552 {
3553 rtx from_rtx, size;
3554
3555 push_temp_slots ();
3556 size = expr_size (from);
3557 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3558 EXPAND_MEMORY_USE_DONT);
3559
3560       /* Copy the access rights bitmap.  */
3561 if (current_function_check_memory_usage)
3562 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3563 XEXP (to_rtx, 0), Pmode,
3564 XEXP (from_rtx, 0), Pmode,
3565 convert_to_mode (TYPE_MODE (sizetype),
3566 size, TREE_UNSIGNED (sizetype)),
3567 TYPE_MODE (sizetype));
3568
3569 #ifdef TARGET_MEM_FUNCTIONS
3570 emit_library_call (memcpy_libfunc, 0,
3571 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3572 XEXP (from_rtx, 0), Pmode,
3573 convert_to_mode (TYPE_MODE (sizetype),
3574 size, TREE_UNSIGNED (sizetype)),
3575 TYPE_MODE (sizetype));
3576 #else
3577 emit_library_call (bcopy_libfunc, 0,
3578 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3579 XEXP (to_rtx, 0), Pmode,
3580 convert_to_mode (TYPE_MODE (integer_type_node),
3581 size, TREE_UNSIGNED (integer_type_node)),
3582 TYPE_MODE (integer_type_node));
3583 #endif
3584
3585 preserve_temp_slots (to_rtx);
3586 free_temp_slots ();
3587 pop_temp_slots ();
3588 return want_value ? to_rtx : NULL_RTX;
3589 }
3590
3591 /* Compute FROM and store the value in the rtx we got. */
3592
3593 push_temp_slots ();
3594 result = store_expr (from, to_rtx, want_value);
3595 preserve_temp_slots (result);
3596 free_temp_slots ();
3597 pop_temp_slots ();
3598 return want_value ? result : NULL_RTX;
3599 }
3600
3601 /* Generate code for computing expression EXP,
3602 and storing the value into TARGET.
3603 TARGET may contain a QUEUED rtx.
3604
3605 If WANT_VALUE is nonzero, return a copy of the value
3606 not in TARGET, so that we can be sure to use the proper
3607 value in a containing expression even if TARGET has something
3608 else stored in it. If possible, we copy the value through a pseudo
3609 and return that pseudo. Or, if the value is constant, we try to
3610 return the constant. In some cases, we return a pseudo
3611 copied *from* TARGET.
3612
3613 If the mode is BLKmode then we may return TARGET itself.
3614    It turns out that in BLKmode it doesn't cause a problem,
3615 because C has no operators that could combine two different
3616 assignments into the same BLKmode object with different values
3617 with no sequence point. Will other languages need this to
3618 be more thorough?
3619
3620 If WANT_VALUE is 0, we return NULL, to make sure
3621 to catch quickly any cases where the caller uses the value
3622 and fails to set WANT_VALUE. */
3623
3624 rtx
3625 store_expr (exp, target, want_value)
3626 register tree exp;
3627 register rtx target;
3628 int want_value;
3629 {
3630 register rtx temp;
3631 int dont_return_target = 0;
3632
3633 if (TREE_CODE (exp) == COMPOUND_EXPR)
3634 {
3635 /* Perform first part of compound expression, then assign from second
3636 part. */
3637 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3638 emit_queue ();
3639 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3640 }
3641 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3642 {
3643 /* For conditional expression, get safe form of the target. Then
3644 test the condition, doing the appropriate assignment on either
3645 side. This avoids the creation of unnecessary temporaries.
3646 For non-BLKmode, it is more efficient not to do this. */
3647
3648 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3649
3650 emit_queue ();
3651 target = protect_from_queue (target, 1);
3652
3653 do_pending_stack_adjust ();
3654 NO_DEFER_POP;
3655 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3656 start_cleanup_deferral ();
3657 store_expr (TREE_OPERAND (exp, 1), target, 0);
3658 end_cleanup_deferral ();
3659 emit_queue ();
3660 emit_jump_insn (gen_jump (lab2));
3661 emit_barrier ();
3662 emit_label (lab1);
3663 start_cleanup_deferral ();
3664 store_expr (TREE_OPERAND (exp, 2), target, 0);
3665 end_cleanup_deferral ();
3666 emit_queue ();
3667 emit_label (lab2);
3668 OK_DEFER_POP;
3669
3670 return want_value ? target : NULL_RTX;
3671 }
3672 else if (queued_subexp_p (target))
3673 /* If target contains a postincrement, let's not risk
3674 using it as the place to generate the rhs. */
3675 {
3676 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3677 {
3678 /* Expand EXP into a new pseudo. */
3679 temp = gen_reg_rtx (GET_MODE (target));
3680 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3681 }
3682 else
3683 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3684
3685 /* If target is volatile, ANSI requires accessing the value
3686 *from* the target, if it is accessed. So make that happen.
3687 In no case return the target itself. */
3688 if (! MEM_VOLATILE_P (target) && want_value)
3689 dont_return_target = 1;
3690 }
3691 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3692 && GET_MODE (target) != BLKmode)
3693 /* If target is in memory and caller wants value in a register instead,
3694 arrange that. Pass TARGET as target for expand_expr so that,
3695 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3696 We know expand_expr will not use the target in that case.
3697 Don't do this if TARGET is volatile because we are supposed
3698 to write it and then read it. */
3699 {
3700 temp = expand_expr (exp, target, GET_MODE (target), 0);
3701 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3702 temp = copy_to_reg (temp);
3703 dont_return_target = 1;
3704 }
3705 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3706 /* If this is a scalar in a register that is stored in a wider mode
3707 than the declared mode, compute the result into its declared mode
3708 and then convert to the wider mode. Our value is the computed
3709 expression. */
3710 {
3711 /* If we don't want a value, we can do the conversion inside EXP,
3712 which will often result in some optimizations. Do the conversion
3713 in two steps: first change the signedness, if needed, then
3714 the extend. But don't do this if the type of EXP is a subtype
3715 of something else since then the conversion might involve
3716 more than just converting modes. */
3717 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3718 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3719 {
3720 if (TREE_UNSIGNED (TREE_TYPE (exp))
3721 != SUBREG_PROMOTED_UNSIGNED_P (target))
3722 exp
3723 = convert
3724 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3725 TREE_TYPE (exp)),
3726 exp);
3727
3728 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3729 SUBREG_PROMOTED_UNSIGNED_P (target)),
3730 exp);
3731 }
3732
3733 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3734
3735 /* If TEMP is a volatile MEM and we want a result value, make
3736 the access now so it gets done only once. Likewise if
3737 it contains TARGET. */
3738 if (GET_CODE (temp) == MEM && want_value
3739 && (MEM_VOLATILE_P (temp)
3740 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3741 temp = copy_to_reg (temp);
3742
3743 /* If TEMP is a VOIDmode constant, use convert_modes to make
3744 sure that we properly convert it. */
3745 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3746 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3747 TYPE_MODE (TREE_TYPE (exp)), temp,
3748 SUBREG_PROMOTED_UNSIGNED_P (target));
3749
3750 convert_move (SUBREG_REG (target), temp,
3751 SUBREG_PROMOTED_UNSIGNED_P (target));
3752
3753 /* If we promoted a constant, change the mode back down to match
3754 target. Otherwise, the caller might get confused by a result whose
3755 mode is larger than expected. */
3756
3757 if (want_value && GET_MODE (temp) != GET_MODE (target)
3758 && GET_MODE (temp) != VOIDmode)
3759 {
3760 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3761 SUBREG_PROMOTED_VAR_P (temp) = 1;
3762 SUBREG_PROMOTED_UNSIGNED_P (temp)
3763 = SUBREG_PROMOTED_UNSIGNED_P (target);
3764 }
3765
3766 return want_value ? temp : NULL_RTX;
3767 }
3768 else
3769 {
3770 temp = expand_expr (exp, target, GET_MODE (target), 0);
3771 /* Return TARGET if it's a specified hardware register.
3772 If TARGET is a volatile mem ref, either return TARGET
3773 or return a reg copied *from* TARGET; ANSI requires this.
3774
3775 Otherwise, if TEMP is not TARGET, return TEMP
3776 if it is constant (for efficiency),
3777 or if we really want the correct value. */
3778 if (!(target && GET_CODE (target) == REG
3779 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3780 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3781 && ! rtx_equal_p (temp, target)
3782 && (CONSTANT_P (temp) || want_value))
3783 dont_return_target = 1;
3784 }
3785
3786 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3787 the same as that of TARGET, adjust the constant. This is needed, for
3788 example, in case it is a CONST_DOUBLE and we want only a word-sized
3789 value. */
3790 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3791 && TREE_CODE (exp) != ERROR_MARK
3792 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3793 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3794 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3795
3796 if (current_function_check_memory_usage
3797 && GET_CODE (target) == MEM
3798 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3799 {
3800 if (GET_CODE (temp) == MEM)
3801 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3802 XEXP (target, 0), Pmode,
3803 XEXP (temp, 0), Pmode,
3804 expr_size (exp), TYPE_MODE (sizetype));
3805 else
3806 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3807 XEXP (target, 0), Pmode,
3808 expr_size (exp), TYPE_MODE (sizetype),
3809 GEN_INT (MEMORY_USE_WO),
3810 TYPE_MODE (integer_type_node));
3811 }
3812
3813 /* If value was not generated in the target, store it there.
3814 Convert the value to TARGET's type first if necessary. */
3815 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3816 one or both of them are volatile memory refs, we have to distinguish
3817 two cases:
3818 - expand_expr has used TARGET. In this case, we must not generate
3819 another copy. This can be detected by TEMP and TARGET comparing
3820 equal according to == .
3821 - expand_expr has not used TARGET - that means that the source just
3822 happens to have the same RTX form. Since temp will have been created
3823 by expand_expr, it will compare unequal according to == .
3824 We must generate a copy in this case, to reach the correct number
3825 of volatile memory references. */
3826
3827 if ((! rtx_equal_p (temp, target)
3828 || (temp != target && (side_effects_p (temp)
3829 || side_effects_p (target))))
3830 && TREE_CODE (exp) != ERROR_MARK)
3831 {
3832 target = protect_from_queue (target, 1);
3833 if (GET_MODE (temp) != GET_MODE (target)
3834 && GET_MODE (temp) != VOIDmode)
3835 {
3836 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3837 if (dont_return_target)
3838 {
3839 /* In this case, we will return TEMP,
3840 so make sure it has the proper mode.
3841 But don't forget to store the value into TARGET. */
3842 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3843 emit_move_insn (target, temp);
3844 }
3845 else
3846 convert_move (target, temp, unsignedp);
3847 }
3848
3849 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3850 {
3851 /* Handle copying a string constant into an array.
3852 The string constant may be shorter than the array.
3853 So copy just the string's actual length, and clear the rest. */
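/* For example, with the hypothetical declaration
       char buf[8] = "ab";
   the string constant occupies 3 bytes ("ab" plus the terminating
   null), so 3 bytes are block-copied and the remaining 5 bytes of
   BUF are cleared by the code below.  */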
3854 rtx size;
3855 rtx addr;
3856
3857 /* Get the size of the data type of the string,
3858 which is actually the size of the target. */
3859 size = expr_size (exp);
3860 if (GET_CODE (size) == CONST_INT
3861 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3862 emit_block_move (target, temp, size,
3863 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3864 else
3865 {
3866 /* Compute the size of the data to copy from the string. */
3867 tree copy_size
3868 = size_binop (MIN_EXPR,
3869 make_tree (sizetype, size),
3870 convert (sizetype,
3871 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3872 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3873 VOIDmode, 0);
3874 rtx label = 0;
3875
3876 /* Copy that much. */
3877 emit_block_move (target, temp, copy_size_rtx,
3878 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3879
3880 /* Figure out how much is left in TARGET that we have to clear.
3881 Do all calculations in ptr_mode. */
3882
3883 addr = XEXP (target, 0);
3884 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3885
3886 if (GET_CODE (copy_size_rtx) == CONST_INT)
3887 {
3888 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3889 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3890 }
3891 else
3892 {
3893 addr = force_reg (ptr_mode, addr);
3894 addr = expand_binop (ptr_mode, add_optab, addr,
3895 copy_size_rtx, NULL_RTX, 0,
3896 OPTAB_LIB_WIDEN);
3897
3898 size = expand_binop (ptr_mode, sub_optab, size,
3899 copy_size_rtx, NULL_RTX, 0,
3900 OPTAB_LIB_WIDEN);
3901
3902 label = gen_label_rtx ();
3903 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3904 GET_MODE (size), 0, 0, label);
3905 }
3906
3907 if (size != const0_rtx)
3908 {
3909 /* Be sure we can write on ADDR. */
3910 if (current_function_check_memory_usage)
3911 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3912 addr, Pmode,
3913 size, TYPE_MODE (sizetype),
3914 GEN_INT (MEMORY_USE_WO),
3915 TYPE_MODE (integer_type_node));
3916 #ifdef TARGET_MEM_FUNCTIONS
3917 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3918 addr, ptr_mode,
3919 const0_rtx, TYPE_MODE (integer_type_node),
3920 convert_to_mode (TYPE_MODE (sizetype),
3921 size,
3922 TREE_UNSIGNED (sizetype)),
3923 TYPE_MODE (sizetype));
3924 #else
3925 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3926 addr, ptr_mode,
3927 convert_to_mode (TYPE_MODE (integer_type_node),
3928 size,
3929 TREE_UNSIGNED (integer_type_node)),
3930 TYPE_MODE (integer_type_node));
3931 #endif
3932 }
3933
3934 if (label)
3935 emit_label (label);
3936 }
3937 }
3938 /* Handle calls that return values in multiple non-contiguous locations.
3939 The Irix 6 ABI has examples of this. */
3940 else if (GET_CODE (target) == PARALLEL)
3941 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3942 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3943 else if (GET_MODE (temp) == BLKmode)
3944 emit_block_move (target, temp, expr_size (exp),
3945 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3946 else
3947 emit_move_insn (target, temp);
3948 }
3949
3950 /* If we don't want a value, return NULL_RTX. */
3951 if (! want_value)
3952 return NULL_RTX;
3953
3954 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3955 ??? The latter test doesn't seem to make sense. */
3956 else if (dont_return_target && GET_CODE (temp) != MEM)
3957 return temp;
3958
3959 /* Return TARGET itself if it is a hard register. */
3960 else if (want_value && GET_MODE (target) != BLKmode
3961 && ! (GET_CODE (target) == REG
3962 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3963 return copy_to_reg (target);
3964
3965 else
3966 return target;
3967 }
3968 \f
3969 /* Return 1 if EXP just contains zeros. */
3970
3971 static int
3972 is_zeros_p (exp)
3973 tree exp;
3974 {
3975 tree elt;
3976
3977 switch (TREE_CODE (exp))
3978 {
3979 case CONVERT_EXPR:
3980 case NOP_EXPR:
3981 case NON_LVALUE_EXPR:
3982 return is_zeros_p (TREE_OPERAND (exp, 0));
3983
3984 case INTEGER_CST:
3985 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3986
3987 case COMPLEX_CST:
3988 return
3989 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3990
3991 case REAL_CST:
3992 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3993
3994 case CONSTRUCTOR:
3995 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3996 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3997 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3998 if (! is_zeros_p (TREE_VALUE (elt)))
3999 return 0;
4000
4001 return 1;
4002
4003 default:
4004 return 0;
4005 }
4006 }
4007
4008 /* Return 1 if EXP contains mostly (3/4) zeros. */
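/* Worked example: a CONSTRUCTOR with 8 elements of which 6 are zero
   gives 4 * 6 = 24 >= 3 * 8 = 24, so it is considered mostly zero;
   with only 5 zero elements, 20 < 24 and it is not.  */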
4009
4010 static int
4011 mostly_zeros_p (exp)
4012 tree exp;
4013 {
4014 if (TREE_CODE (exp) == CONSTRUCTOR)
4015 {
4016 int elts = 0, zeros = 0;
4017 tree elt = CONSTRUCTOR_ELTS (exp);
4018 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4019 {
4020 /* If there are no ranges of true bits, it is all zero. */
4021 return elt == NULL_TREE;
4022 }
4023 for (; elt; elt = TREE_CHAIN (elt))
4024 {
4025 /* We do not handle the case where the index is a RANGE_EXPR,
4026 so the statistic will be somewhat inaccurate.
4027 We do make a more accurate count in store_constructor itself,
4028 so, since this function is only used for nested array elements,
4029 this should be close enough. */
4030 if (mostly_zeros_p (TREE_VALUE (elt)))
4031 zeros++;
4032 elts++;
4033 }
4034
4035 return 4 * zeros >= 3 * elts;
4036 }
4037
4038 return is_zeros_p (exp);
4039 }
4040 \f
4041 /* Helper function for store_constructor.
4042 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4043 TYPE is the type of the CONSTRUCTOR, not the element type.
4044 ALIGN and CLEARED are as for store_constructor.
4045
4046 This provides a recursive shortcut back to store_constructor when it isn't
4047 necessary to go through store_field. This is so that we can pass through
4048 the cleared field to let store_constructor know that we may not have to
4049 clear a substructure if the outer structure has already been cleared. */
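/* For instance, given the hypothetical initializer
       struct inner { int a, b, c, d; };
       struct outer { struct inner i; int x; } o = { { 1 } };
   the outer constructor clears all of O first, so when the nested
   constructor for O.i is stored through this shortcut, the CLEARED
   flag lets store_constructor skip the inner zero-valued fields.  */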
4050
4051 static void
4052 store_constructor_field (target, bitsize, bitpos,
4053 mode, exp, type, align, cleared)
4054 rtx target;
4055 int bitsize, bitpos;
4056 enum machine_mode mode;
4057 tree exp, type;
4058 int align;
4059 int cleared;
4060 {
4061 if (TREE_CODE (exp) == CONSTRUCTOR
4062 && bitpos % BITS_PER_UNIT == 0
4063 /* If we have a non-zero bitpos for a register target, then we just
4064 let store_field do the bitfield handling. This is unlikely to
4065 generate unnecessary clear instructions anyway. */
4066 && (bitpos == 0 || GET_CODE (target) == MEM))
4067 {
4068 if (bitpos != 0)
4069 target
4070 = change_address (target,
4071 GET_MODE (target) == BLKmode
4072 || 0 != (bitpos
4073 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4074 ? BLKmode : VOIDmode,
4075 plus_constant (XEXP (target, 0),
4076 bitpos / BITS_PER_UNIT));
4077 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4078 }
4079 else
4080 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4081 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4082 int_size_in_bytes (type), cleared);
4083 }
4084
4085 /* Store the value of constructor EXP into the rtx TARGET.
4086 TARGET is either a REG or a MEM.
4087 ALIGN is the maximum known alignment for TARGET, in bits.
4088 CLEARED is true if TARGET is known to have been zero'd.
4089 SIZE is the number of bytes of TARGET we are allowed to modify: this
4090 may not be the same as the size of EXP if we are assigning to a field
4091 which has been packed to exclude padding bits. */
4092
4093 static void
4094 store_constructor (exp, target, align, cleared, size)
4095 tree exp;
4096 rtx target;
4097 int align;
4098 int cleared;
4099 int size;
4100 {
4101 tree type = TREE_TYPE (exp);
4102 #ifdef WORD_REGISTER_OPERATIONS
4103 rtx exp_size = expr_size (exp);
4104 #endif
4105
4106 /* We know our target cannot conflict, since safe_from_p has been called. */
4107 #if 0
4108 /* Don't try copying piece by piece into a hard register
4109 since that is vulnerable to being clobbered by EXP.
4110 Instead, construct in a pseudo register and then copy it all. */
4111 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4112 {
4113 rtx temp = gen_reg_rtx (GET_MODE (target));
4114 store_constructor (exp, temp, align, cleared, size);
4115 emit_move_insn (target, temp);
4116 return;
4117 }
4118 #endif
4119
4120 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4121 || TREE_CODE (type) == QUAL_UNION_TYPE)
4122 {
4123 register tree elt;
4124
4125 /* Inform later passes that the whole union value is dead. */
4126 if ((TREE_CODE (type) == UNION_TYPE
4127 || TREE_CODE (type) == QUAL_UNION_TYPE)
4128 && ! cleared)
4129 {
4130 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4131
4132 /* If the constructor is empty, clear the union. */
4133 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4134 clear_storage (target, expr_size (exp),
4135 TYPE_ALIGN (type) / BITS_PER_UNIT);
4136 }
4137
4138 /* If we are building a static constructor into a register,
4139 set the initial value as zero so we can fold the value into
4140 a constant. But if more than one register is involved,
4141 this probably loses. */
4142 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4143 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4144 {
4145 if (! cleared)
4146 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4147
4148 cleared = 1;
4149 }
4150
4151 /* If the constructor has fewer fields than the structure
4152 or if we are initializing the structure to mostly zeros,
4153 clear the whole structure first. */
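/* For example, with the hypothetical declaration
       struct { int a, b, c; } s = { 1 };
   the constructor has one element but the type has three fields,
   so all of S is cleared here and only the nonzero field is then
   stored individually.  */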
4154 else if (size > 0
4155 && ((list_length (CONSTRUCTOR_ELTS (exp))
4156 != list_length (TYPE_FIELDS (type)))
4157 || mostly_zeros_p (exp)))
4158 {
4159 if (! cleared)
4160 clear_storage (target, GEN_INT (size),
4161 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4162
4163 cleared = 1;
4164 }
4165 else if (! cleared)
4166 /* Inform later passes that the old value is dead. */
4167 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4168
4169 /* Store each element of the constructor into
4170 the corresponding field of TARGET. */
4171
4172 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4173 {
4174 register tree field = TREE_PURPOSE (elt);
4175 #ifdef WORD_REGISTER_OPERATIONS
4176 tree value = TREE_VALUE (elt);
4177 #endif
4178 register enum machine_mode mode;
4179 int bitsize;
4180 int bitpos = 0;
4181 int unsignedp;
4182 tree pos, constant = 0, offset = 0;
4183 rtx to_rtx = target;
4184
4185 /* Just ignore missing fields.
4186 We cleared the whole structure, above,
4187 if any fields are missing. */
4188 if (field == 0)
4189 continue;
4190
4191 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4192 continue;
4193
4194 if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
4195 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4196 else
4197 bitsize = -1;
4198
4199 unsignedp = TREE_UNSIGNED (field);
4200 mode = DECL_MODE (field);
4201 if (DECL_BIT_FIELD (field))
4202 mode = VOIDmode;
4203
4204 pos = DECL_FIELD_BITPOS (field);
4205 if (TREE_CODE (pos) == INTEGER_CST)
4206 constant = pos;
4207 else if (TREE_CODE (pos) == PLUS_EXPR
4208 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4209 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4210 else
4211 offset = pos;
4212
4213 if (constant)
4214 bitpos = TREE_INT_CST_LOW (constant);
4215
4216 if (offset)
4217 {
4218 rtx offset_rtx;
4219
4220 if (contains_placeholder_p (offset))
4221 offset = build (WITH_RECORD_EXPR, sizetype,
4222 offset, make_tree (TREE_TYPE (exp), target));
4223
4224 offset = size_binop (EXACT_DIV_EXPR, offset,
4225 size_int (BITS_PER_UNIT));
4226
4227 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4228 if (GET_CODE (to_rtx) != MEM)
4229 abort ();
4230
4231 if (GET_MODE (offset_rtx) != ptr_mode)
4232 {
4233 #ifdef POINTERS_EXTEND_UNSIGNED
4234 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4235 #else
4236 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4237 #endif
4238 }
4239
4240 to_rtx
4241 = change_address (to_rtx, VOIDmode,
4242 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4243 force_reg (ptr_mode,
4244 offset_rtx)));
4245 }
4246
4247 if (TREE_READONLY (field))
4248 {
4249 if (GET_CODE (to_rtx) == MEM)
4250 to_rtx = copy_rtx (to_rtx);
4251
4252 RTX_UNCHANGING_P (to_rtx) = 1;
4253 }
4254
4255 #ifdef WORD_REGISTER_OPERATIONS
4256 /* If this initializes a field that is smaller than a word, at the
4257 start of a word, try to widen it to a full word.
4258 This special case allows us to output C++ member function
4259 initializations in a form that the optimizers can understand. */
4260 if (constant
4261 && GET_CODE (target) == REG
4262 && bitsize < BITS_PER_WORD
4263 && bitpos % BITS_PER_WORD == 0
4264 && GET_MODE_CLASS (mode) == MODE_INT
4265 && TREE_CODE (value) == INTEGER_CST
4266 && GET_CODE (exp_size) == CONST_INT
4267 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4268 {
4269 tree type = TREE_TYPE (value);
4270 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4271 {
4272 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4273 value = convert (type, value);
4274 }
4275 if (BYTES_BIG_ENDIAN)
4276 value
4277 = fold (build (LSHIFT_EXPR, type, value,
4278 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4279 bitsize = BITS_PER_WORD;
4280 mode = word_mode;
4281 }
4282 #endif
4283 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4284 TREE_VALUE (elt), type,
4285 MIN (align,
4286 DECL_ALIGN (TREE_PURPOSE (elt))),
4287 cleared);
4288 }
4289 }
4290 else if (TREE_CODE (type) == ARRAY_TYPE)
4291 {
4292 register tree elt;
4293 register int i;
4294 int need_to_clear;
4295 tree domain = TYPE_DOMAIN (type);
4296 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4297 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4298 tree elttype = TREE_TYPE (type);
4299
4300 /* If the constructor has fewer elements than the array,
4301 clear the whole array first. Similarly if this is a
4302 static constructor of a non-BLKmode object. */
4303 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4304 need_to_clear = 1;
4305 else
4306 {
4307 HOST_WIDE_INT count = 0, zero_count = 0;
4308 need_to_clear = 0;
4309 /* This loop is a more accurate version of the loop in
4310 mostly_zeros_p (it handles RANGE_EXPR in an index).
4311 It is also needed to check for missing elements. */
4312 for (elt = CONSTRUCTOR_ELTS (exp);
4313 elt != NULL_TREE;
4314 elt = TREE_CHAIN (elt))
4315 {
4316 tree index = TREE_PURPOSE (elt);
4317 HOST_WIDE_INT this_node_count;
4318 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4319 {
4320 tree lo_index = TREE_OPERAND (index, 0);
4321 tree hi_index = TREE_OPERAND (index, 1);
4322 if (TREE_CODE (lo_index) != INTEGER_CST
4323 || TREE_CODE (hi_index) != INTEGER_CST)
4324 {
4325 need_to_clear = 1;
4326 break;
4327 }
4328 this_node_count = TREE_INT_CST_LOW (hi_index)
4329 - TREE_INT_CST_LOW (lo_index) + 1;
4330 }
4331 else
4332 this_node_count = 1;
4333 count += this_node_count;
4334 if (mostly_zeros_p (TREE_VALUE (elt)))
4335 zero_count += this_node_count;
4336 }
4337 /* Clear the entire array first if there are any missing elements,
4338 or if the incidence of zero elements is >= 75%. */
4339 if (count < maxelt - minelt + 1
4340 || 4 * zero_count >= 3 * count)
4341 need_to_clear = 1;
4342 }
4343 if (need_to_clear && size > 0)
4344 {
4345 if (! cleared)
4346 clear_storage (target, GEN_INT (size),
4347 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4348 cleared = 1;
4349 }
4350 else
4351 /* Inform later passes that the old value is dead. */
4352 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4353
4354 /* Store each element of the constructor into
4355 the corresponding element of TARGET, determined
4356 by counting the elements. */
4357 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4358 elt;
4359 elt = TREE_CHAIN (elt), i++)
4360 {
4361 register enum machine_mode mode;
4362 int bitsize;
4363 int bitpos;
4364 int unsignedp;
4365 tree value = TREE_VALUE (elt);
4366 int align = TYPE_ALIGN (TREE_TYPE (value));
4367 tree index = TREE_PURPOSE (elt);
4368 rtx xtarget = target;
4369
4370 if (cleared && is_zeros_p (value))
4371 continue;
4372
4373 unsignedp = TREE_UNSIGNED (elttype);
4374 mode = TYPE_MODE (elttype);
4375 if (mode == BLKmode)
4376 {
4377 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4378 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4379 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4380 else
4381 bitsize = -1;
4382 }
4383 else
4384 bitsize = GET_MODE_BITSIZE (mode);
4385
4386 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4387 {
4388 tree lo_index = TREE_OPERAND (index, 0);
4389 tree hi_index = TREE_OPERAND (index, 1);
4390 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4391 struct nesting *loop;
4392 HOST_WIDE_INT lo, hi, count;
4393 tree position;
4394
4395 /* If the range is constant and "small", unroll the loop. */
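/* For example, the GNU C range initializer (hypothetical)
       int a[16] = { [0 ... 3] = 7 };
   yields a RANGE_EXPR index; four ints are well within the size
   limit below, so the four stores are emitted inline instead of
   through a run-time loop.  */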
4396 if (TREE_CODE (lo_index) == INTEGER_CST
4397 && TREE_CODE (hi_index) == INTEGER_CST
4398 && (lo = TREE_INT_CST_LOW (lo_index),
4399 hi = TREE_INT_CST_LOW (hi_index),
4400 count = hi - lo + 1,
4401 (GET_CODE (target) != MEM
4402 || count <= 2
4403 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4404 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4405 <= 40 * 8))))
4406 {
4407 lo -= minelt; hi -= minelt;
4408 for (; lo <= hi; lo++)
4409 {
4410 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4411 store_constructor_field (target, bitsize, bitpos, mode,
4412 value, type, align, cleared);
4413 }
4414 }
4415 else
4416 {
4417 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4418 loop_top = gen_label_rtx ();
4419 loop_end = gen_label_rtx ();
4420
4421 unsignedp = TREE_UNSIGNED (domain);
4422
4423 index = build_decl (VAR_DECL, NULL_TREE, domain);
4424
4425 DECL_RTL (index) = index_r
4426 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4427 &unsignedp, 0));
4428
4429 if (TREE_CODE (value) == SAVE_EXPR
4430 && SAVE_EXPR_RTL (value) == 0)
4431 {
4432 /* Make sure value gets expanded once before the
4433 loop. */
4434 expand_expr (value, const0_rtx, VOIDmode, 0);
4435 emit_queue ();
4436 }
4437 store_expr (lo_index, index_r, 0);
4438 loop = expand_start_loop (0);
4439
4440 /* Assign value to element index. */
4441 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4442 size_int (BITS_PER_UNIT));
4443 position = size_binop (MULT_EXPR,
4444 size_binop (MINUS_EXPR, index,
4445 TYPE_MIN_VALUE (domain)),
4446 position);
4447 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4448 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4449 xtarget = change_address (target, mode, addr);
4450 if (TREE_CODE (value) == CONSTRUCTOR)
4451 store_constructor (value, xtarget, align, cleared,
4452 bitsize / BITS_PER_UNIT);
4453 else
4454 store_expr (value, xtarget, 0);
4455
4456 expand_exit_loop_if_false (loop,
4457 build (LT_EXPR, integer_type_node,
4458 index, hi_index));
4459
4460 expand_increment (build (PREINCREMENT_EXPR,
4461 TREE_TYPE (index),
4462 index, integer_one_node), 0, 0);
4463 expand_end_loop ();
4464 emit_label (loop_end);
4465
4466 /* Needed by stupid register allocation, to extend the
4467 lifetime of pseudo-regs used by target past the end
4468 of the loop. */
4469 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4470 }
4471 }
4472 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4473 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4474 {
4475 rtx pos_rtx, addr;
4476 tree position;
4477
4478 if (index == 0)
4479 index = size_int (i);
4480
4481 if (minelt)
4482 index = size_binop (MINUS_EXPR, index,
4483 TYPE_MIN_VALUE (domain));
4484 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4485 size_int (BITS_PER_UNIT));
4486 position = size_binop (MULT_EXPR, index, position);
4487 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4488 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4489 xtarget = change_address (target, mode, addr);
4490 store_expr (value, xtarget, 0);
4491 }
4492 else
4493 {
4494 if (index != 0)
4495 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4496 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4497 else
4498 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4499 store_constructor_field (target, bitsize, bitpos, mode, value,
4500 type, align, cleared);
4501 }
4502 }
4503 }
4504 /* set constructor assignments */
4505 else if (TREE_CODE (type) == SET_TYPE)
4506 {
4507 tree elt = CONSTRUCTOR_ELTS (exp);
4508 int nbytes = int_size_in_bytes (type), nbits;
4509 tree domain = TYPE_DOMAIN (type);
4510 tree domain_min, domain_max, bitlength;
4511
4512 /* The default implementation strategy is to extract the constant
4513 parts of the constructor, use that to initialize the target,
4514 and then "or" in whatever non-constant ranges we need in addition.
4515
4516 If a large set is all zero or all ones, it is
4517 probably better to set it using memset (if available) or bzero.
4518 Also, if a large set has just a single range, it may also be
4519 better to first clear the set (using bzero/memset), and then
4520 set the bits we want. */
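/* For example (hypothetical Pascal/CHILL-style source), a small set
   constructor such as [2, 5..7] over a domain 0..31 consists only of
   constant bits and is initialized by moving precomputed words,
   while a non-constant range such as [i..j] is left on the element
   list and handled by the __setbits (or memset) calls further down.  */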
4521
4522 /* Check for all zeros. */
4523 if (elt == NULL_TREE && size > 0)
4524 {
4525 if (!cleared)
4526 clear_storage (target, GEN_INT (size),
4527 TYPE_ALIGN (type) / BITS_PER_UNIT);
4528 return;
4529 }
4530
4531 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4532 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4533 bitlength = size_binop (PLUS_EXPR,
4534 size_binop (MINUS_EXPR, domain_max, domain_min),
4535 size_one_node);
4536
4537 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4538 abort ();
4539 nbits = TREE_INT_CST_LOW (bitlength);
4540
4541 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4542 are "complicated" (more than one range), initialize (the
4543 constant parts) by copying from a constant. */
4544 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4545 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4546 {
4547 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4548 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4549 char *bit_buffer = (char *) alloca (nbits);
4550 HOST_WIDE_INT word = 0;
4551 int bit_pos = 0;
4552 int ibit = 0;
4553 int offset = 0; /* In bytes from beginning of set. */
4554 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4555 for (;;)
4556 {
4557 if (bit_buffer[ibit])
4558 {
4559 if (BYTES_BIG_ENDIAN)
4560 word |= (1 << (set_word_size - 1 - bit_pos));
4561 else
4562 word |= 1 << bit_pos;
4563 }
4564 bit_pos++; ibit++;
4565 if (bit_pos >= set_word_size || ibit == nbits)
4566 {
4567 if (word != 0 || ! cleared)
4568 {
4569 rtx datum = GEN_INT (word);
4570 rtx to_rtx;
4571 /* The assumption here is that it is safe to use
4572 XEXP if the set is multi-word, but not if
4573 it's single-word. */
4574 if (GET_CODE (target) == MEM)
4575 {
4576 to_rtx = plus_constant (XEXP (target, 0), offset);
4577 to_rtx = change_address (target, mode, to_rtx);
4578 }
4579 else if (offset == 0)
4580 to_rtx = target;
4581 else
4582 abort ();
4583 emit_move_insn (to_rtx, datum);
4584 }
4585 if (ibit == nbits)
4586 break;
4587 word = 0;
4588 bit_pos = 0;
4589 offset += set_word_size / BITS_PER_UNIT;
4590 }
4591 }
4592 }
4593 else if (!cleared)
4594 {
4595 /* Don't bother clearing storage if the set is all ones. */
4596 if (TREE_CHAIN (elt) != NULL_TREE
4597 || (TREE_PURPOSE (elt) == NULL_TREE
4598 ? nbits != 1
4599 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4600 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4601 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4602 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4603 != nbits))))
4604 clear_storage (target, expr_size (exp),
4605 TYPE_ALIGN (type) / BITS_PER_UNIT);
4606 }
4607
4608 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4609 {
4610 /* start of range of element or NULL */
4611 tree startbit = TREE_PURPOSE (elt);
4612 /* end of range of element, or element value */
4613 tree endbit = TREE_VALUE (elt);
4614 #ifdef TARGET_MEM_FUNCTIONS
4615 HOST_WIDE_INT startb, endb;
4616 #endif
4617 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4618
4619 bitlength_rtx = expand_expr (bitlength,
4620 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4621
4622 /* handle non-range tuple element like [ expr ] */
4623 if (startbit == NULL_TREE)
4624 {
4625 startbit = save_expr (endbit);
4626 endbit = startbit;
4627 }
4628 startbit = convert (sizetype, startbit);
4629 endbit = convert (sizetype, endbit);
4630 if (! integer_zerop (domain_min))
4631 {
4632 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4633 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4634 }
4635 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4636 EXPAND_CONST_ADDRESS);
4637 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4638 EXPAND_CONST_ADDRESS);
4639
4640 if (REG_P (target))
4641 {
4642 targetx = assign_stack_temp (GET_MODE (target),
4643 GET_MODE_SIZE (GET_MODE (target)),
4644 0);
4645 emit_move_insn (targetx, target);
4646 }
4647 else if (GET_CODE (target) == MEM)
4648 targetx = target;
4649 else
4650 abort ();
4651
4652 #ifdef TARGET_MEM_FUNCTIONS
4653 /* Optimization: If startbit and endbit are
4654 constants divisible by BITS_PER_UNIT,
4655 call memset instead. */
4656 if (TREE_CODE (startbit) == INTEGER_CST
4657 && TREE_CODE (endbit) == INTEGER_CST
4658 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4659 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4660 {
4661 emit_library_call (memset_libfunc, 0,
4662 VOIDmode, 3,
4663 plus_constant (XEXP (targetx, 0),
4664 startb / BITS_PER_UNIT),
4665 Pmode,
4666 constm1_rtx, TYPE_MODE (integer_type_node),
4667 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4668 TYPE_MODE (sizetype));
4669 }
4670 else
4671 #endif
4672 {
4673 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4674 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4675 bitlength_rtx, TYPE_MODE (sizetype),
4676 startbit_rtx, TYPE_MODE (sizetype),
4677 endbit_rtx, TYPE_MODE (sizetype));
4678 }
4679 if (REG_P (target))
4680 emit_move_insn (target, targetx);
4681 }
4682 }
4683
4684 else
4685 abort ();
4686 }
4687
4688 /* Store the value of EXP (an expression tree)
4689 into a subfield of TARGET which has mode MODE and occupies
4690 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4691 If MODE is VOIDmode, it means that we are storing into a bit-field.
4692
4693 If VALUE_MODE is VOIDmode, return nothing in particular.
4694 UNSIGNEDP is not used in this case.
4695
4696 Otherwise, return an rtx for the value stored. This rtx
4697 has mode VALUE_MODE if that is convenient to do.
4698 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4699
4700 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4701 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4702
4703 ALIAS_SET is the alias set for the destination. This value will
4704 (in general) be different from that for TARGET, since TARGET is a
4705 reference to the containing structure. */
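/* For instance, a hypothetical assignment to a bit-field member
       struct { unsigned f : 3; } s;
       s.f = 5;
   reaches this function with BITSIZE == 3, BITPOS == 0 and MODE ==
   VOIDmode, and is carried out by store_bit_field below.  */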
4706
4707 static rtx
4708 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4709 unsignedp, align, total_size, alias_set)
4710 rtx target;
4711 int bitsize, bitpos;
4712 enum machine_mode mode;
4713 tree exp;
4714 enum machine_mode value_mode;
4715 int unsignedp;
4716 int align;
4717 int total_size;
4718 int alias_set;
4719 {
4720 HOST_WIDE_INT width_mask = 0;
4721
4722 if (TREE_CODE (exp) == ERROR_MARK)
4723 return const0_rtx;
4724
4725 if (bitsize < HOST_BITS_PER_WIDE_INT)
4726 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4727
4728 /* If we are storing into an unaligned field of an aligned union that is
4729 in a register, we may have the mode of TARGET being an integer mode but
4730 MODE == BLKmode. In that case, get an aligned object whose size and
4731 alignment are the same as TARGET and store TARGET into it (we can avoid
4732 the store if the field being stored is the entire width of TARGET). Then
4733 call ourselves recursively to store the field into a BLKmode version of
4734 that object. Finally, load from the object into TARGET. This is not
4735 very efficient in general, but should only be slightly more expensive
4736 than the otherwise-required unaligned accesses. Perhaps this can be
4737 cleaned up later. */
4738
4739 if (mode == BLKmode
4740 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4741 {
4742 rtx object = assign_stack_temp (GET_MODE (target),
4743 GET_MODE_SIZE (GET_MODE (target)), 0);
4744 rtx blk_object = copy_rtx (object);
4745
4746 MEM_SET_IN_STRUCT_P (object, 1);
4747 MEM_SET_IN_STRUCT_P (blk_object, 1);
4748 PUT_MODE (blk_object, BLKmode);
4749
4750 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4751 emit_move_insn (object, target);
4752
4753 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4754 align, total_size, alias_set);
4755
4756 /* Even though we aren't returning target, we need to
4757 give it the updated value. */
4758 emit_move_insn (target, object);
4759
4760 return blk_object;
4761 }
4762
4763 /* If the structure is in a register or if the component
4764 is a bit field, we cannot use addressing to access it.
4765 Use bit-field techniques or SUBREG to store in it. */
4766
4767 if (mode == VOIDmode
4768 || (mode != BLKmode && ! direct_store[(int) mode]
4769 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4770 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4771 || GET_CODE (target) == REG
4772 || GET_CODE (target) == SUBREG
4773 /* If the field isn't aligned enough to store as an ordinary memref,
4774 store it as a bit field. */
4775 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS
4776 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4777 || bitpos % GET_MODE_ALIGNMENT (mode)))
4778 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS
4779 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4780 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4781 /* If the RHS and field are a constant size and the size of the
4782 RHS isn't the same size as the bitfield, we must use bitfield
4783 operations. */
4784 || ((bitsize >= 0
4785 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
4786 && (TREE_INT_CST_HIGH (TYPE_SIZE (TREE_TYPE (exp))) != 0
4787 || TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))) != bitsize)))
4788 {
4789 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4790
4791 /* If BITSIZE is narrower than the size of the type of EXP
4792 we will be narrowing TEMP. Normally, what's wanted are the
4793 low-order bits. However, if EXP's type is a record and this is
4794 big-endian machine, we want the upper BITSIZE bits. */
4795 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4796 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4797 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4798 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4799 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4800 - bitsize),
4801 temp, 1);
4802
4803 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4804 MODE. */
4805 if (mode != VOIDmode && mode != BLKmode
4806 && mode != TYPE_MODE (TREE_TYPE (exp)))
4807 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4808
4809 /* If the modes of TARGET and TEMP are both BLKmode, both
4810 must be in memory and BITPOS must be aligned on a byte
4811 boundary. If so, we simply do a block copy. */
4812 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4813 {
4814 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4815 || bitpos % BITS_PER_UNIT != 0)
4816 abort ();
4817
4818 target = change_address (target, VOIDmode,
4819 plus_constant (XEXP (target, 0),
4820 bitpos / BITS_PER_UNIT));
4821
4822 /* Find an alignment that is consistent with the bit position. */
4823 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4824 align >>= 1;
4825
4826 emit_block_move (target, temp,
4827 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4828 / BITS_PER_UNIT),
4829 align);
4830
4831 return value_mode == VOIDmode ? const0_rtx : target;
4832 }
4833
4834 /* Store the value in the bitfield. */
4835 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4836 if (value_mode != VOIDmode)
4837 {
4838 /* The caller wants an rtx for the value. */
4839 /* If possible, avoid refetching from the bitfield itself. */
4840 if (width_mask != 0
4841 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4842 {
4843 tree count;
4844 enum machine_mode tmode;
4845
4846 if (unsignedp)
4847 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4848 tmode = GET_MODE (temp);
4849 if (tmode == VOIDmode)
4850 tmode = value_mode;
4851 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4852 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4853 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4854 }
4855 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4856 NULL_RTX, value_mode, 0, align,
4857 total_size);
4858 }
4859 return const0_rtx;
4860 }
4861 else
4862 {
4863 rtx addr = XEXP (target, 0);
4864 rtx to_rtx;
4865
4866 /* If a value is wanted, it must be the lhs;
4867 so make the address stable for multiple use. */
4868
4869 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4870 && ! CONSTANT_ADDRESS_P (addr)
4871 /* A frame-pointer reference is already stable. */
4872 && ! (GET_CODE (addr) == PLUS
4873 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4874 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4875 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4876 addr = copy_to_reg (addr);
4877
4878 /* Now build a reference to just the desired component. */
4879
4880 to_rtx = copy_rtx (change_address (target, mode,
4881 plus_constant (addr,
4882 (bitpos
4883 / BITS_PER_UNIT))));
4884 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4885 MEM_ALIAS_SET (to_rtx) = alias_set;
4886
4887 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4888 }
4889 }
4890 \f
4891 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4892 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4893 ARRAY_REFs and find the ultimate containing object, which we return.
4894
4895 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4896 bit position, and *PUNSIGNEDP to the signedness of the field.
4897 If the position of the field is variable, we store a tree
4898 giving the variable offset (in units) in *POFFSET.
4899 This offset is in addition to the bit position.
4900 If the position is not variable, we store 0 in *POFFSET.
4901 We set *PALIGNMENT to the alignment in bytes of the address that will be
4902 computed. This is the alignment of the thing we return if *POFFSET
4903 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4904
4905 If any of the extraction expressions is volatile,
4906 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4907
4908 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4909 is a mode that can be used to access the field. In that case, *PBITSIZE
4910 is redundant.
4911
4912 If the field describes a variable-sized object, *PMODE is set to
4913 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4914 this case, but the address of the object can be found. */
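/* As an illustration, consider the hypothetical reference
       struct S { int pad; short a[10]; } s;
       ... s.a[i] ...
   on a target with 32-bit int and 16-bit short: the loop below
   unwinds to the containing object S, setting *PBITSIZE to 16,
   *PBITPOS to 32 (the constant offset of the A field) and *POFFSET
   to a tree computing i * 2, the variable byte offset.  */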
4915
4916 tree
4917 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4918 punsignedp, pvolatilep, palignment)
4919 tree exp;
4920 int *pbitsize;
4921 int *pbitpos;
4922 tree *poffset;
4923 enum machine_mode *pmode;
4924 int *punsignedp;
4925 int *pvolatilep;
4926 int *palignment;
4927 {
4928 tree orig_exp = exp;
4929 tree size_tree = 0;
4930 enum machine_mode mode = VOIDmode;
4931 tree offset = integer_zero_node;
4932 unsigned int alignment = BIGGEST_ALIGNMENT;
4933
4934 if (TREE_CODE (exp) == COMPONENT_REF)
4935 {
4936 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4937 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4938 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4939 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4940 }
4941 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4942 {
4943 size_tree = TREE_OPERAND (exp, 1);
4944 *punsignedp = TREE_UNSIGNED (exp);
4945 }
4946 else
4947 {
4948 mode = TYPE_MODE (TREE_TYPE (exp));
4949 if (mode == BLKmode)
4950 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4951
4952 *pbitsize = GET_MODE_BITSIZE (mode);
4953 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4954 }
4955
4956 if (size_tree)
4957 {
4958 if (TREE_CODE (size_tree) != INTEGER_CST)
4959 mode = BLKmode, *pbitsize = -1;
4960 else
4961 *pbitsize = TREE_INT_CST_LOW (size_tree);
4962 }
4963
4964 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4965 and find the ultimate containing object. */
4966
4967 *pbitpos = 0;
4968
4969 while (1)
4970 {
4971 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4972 {
4973 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4974 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4975 : TREE_OPERAND (exp, 2));
4976 tree constant = integer_zero_node, var = pos;
4977
4978 /* If this field hasn't been filled in yet, don't go
4979 past it. This should only happen when folding expressions
4980 made during type construction. */
4981 if (pos == 0)
4982 break;
4983
4984 /* Assume here that the offset is a multiple of a unit.
4985 If not, there should be an explicitly added constant. */
4986 if (TREE_CODE (pos) == PLUS_EXPR
4987 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4988 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4989 else if (TREE_CODE (pos) == INTEGER_CST)
4990 constant = pos, var = integer_zero_node;
4991
4992 *pbitpos += TREE_INT_CST_LOW (constant);
4993 offset = size_binop (PLUS_EXPR, offset,
4994 size_binop (EXACT_DIV_EXPR, var,
4995 size_int (BITS_PER_UNIT)));
4996 }
4997
4998 else if (TREE_CODE (exp) == ARRAY_REF)
4999 {
5000 /* This code is based on the code in case ARRAY_REF in expand_expr
5001 below. We assume here that the size of an array element is
5002 always an integral multiple of BITS_PER_UNIT. */
5003
5004 tree index = TREE_OPERAND (exp, 1);
5005 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5006 tree low_bound
5007 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5008 tree index_type = TREE_TYPE (index);
5009 tree xindex;
5010
5011 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5012 {
5013 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5014 index);
5015 index_type = TREE_TYPE (index);
5016 }
5017
5018 /* Optimize the special-case of a zero lower bound.
5019
5020 We convert the low_bound to sizetype to avoid some problems
5021 with constant folding. (E.g. suppose the lower bound is 1,
5022 and its mode is QI. Without the conversion, (ARRAY
5023 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5024 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5025
5026 But sizetype isn't quite right either (especially if
5027 the lowbound is negative). FIXME */
5028
5029 if (! integer_zerop (low_bound))
5030 index = fold (build (MINUS_EXPR, index_type, index,
5031 convert (sizetype, low_bound)));
5032
5033 if (TREE_CODE (index) == INTEGER_CST)
5034 {
5035 index = convert (sbitsizetype, index);
5036 index_type = TREE_TYPE (index);
5037 }
5038
5039 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5040 convert (sbitsizetype,
5041 TYPE_SIZE (TREE_TYPE (exp)))));
5042
5043 if (TREE_CODE (xindex) == INTEGER_CST
5044 && TREE_INT_CST_HIGH (xindex) == 0)
5045 *pbitpos += TREE_INT_CST_LOW (xindex);
5046 else
5047 {
5048 /* Either the bit offset calculated above is not constant, or
5049 it overflowed. In either case, redo the multiplication
5050 against the size in units. This is especially important
5051 in the non-constant case to avoid a division at runtime. */
5052 xindex = fold (build (MULT_EXPR, ssizetype, index,
5053 convert (ssizetype,
5054 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5055
5056 if (contains_placeholder_p (xindex))
5057 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
5058
5059 offset = size_binop (PLUS_EXPR, offset, xindex);
5060 }
5061 }
5062 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5063 && ! ((TREE_CODE (exp) == NOP_EXPR
5064 || TREE_CODE (exp) == CONVERT_EXPR)
5065 && (TYPE_MODE (TREE_TYPE (exp))
5066 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5067 break;
5068
5069 /* If any reference in the chain is volatile, the effect is volatile. */
5070 if (TREE_THIS_VOLATILE (exp))
5071 *pvolatilep = 1;
5072
5073 /* If the offset is non-constant already, then we can't assume any
5074 alignment more than the alignment here. */
5075 if (! integer_zerop (offset))
5076 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5077
5078 exp = TREE_OPERAND (exp, 0);
5079 }
5080
5081 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5082 alignment = MIN (alignment, DECL_ALIGN (exp));
5083 else if (TREE_TYPE (exp) != 0)
5084 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5085
5086 if (integer_zerop (offset))
5087 offset = 0;
5088
5089 if (offset != 0 && contains_placeholder_p (offset))
5090 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5091
5092 *pmode = mode;
5093 *poffset = offset;
5094 *palignment = alignment / BITS_PER_UNIT;
5095 return exp;
5096 }
5097
5098 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5099 static enum memory_use_mode
5100 get_memory_usage_from_modifier (modifier)
5101 enum expand_modifier modifier;
5102 {
5103 switch (modifier)
5104 {
5105 case EXPAND_NORMAL:
5106 case EXPAND_SUM:
5107 return MEMORY_USE_RO;
5108 break;
5109 case EXPAND_MEMORY_USE_WO:
5110 return MEMORY_USE_WO;
5111 break;
5112 case EXPAND_MEMORY_USE_RW:
5113 return MEMORY_USE_RW;
5114 break;
5115 case EXPAND_MEMORY_USE_DONT:
5116 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5117 MEMORY_USE_DONT, because they are modifiers to a call of
5118 expand_expr in the ADDR_EXPR case of expand_expr. */
5119 case EXPAND_CONST_ADDRESS:
5120 case EXPAND_INITIALIZER:
5121 return MEMORY_USE_DONT;
5122 case EXPAND_MEMORY_USE_BAD:
5123 default:
5124 abort ();
5125 }
5126 }
5127 \f
5128 /* Given an rtx VALUE that may contain additions and multiplications,
5129 return an equivalent value that just refers to a register or memory.
5130 This is done by generating instructions to perform the arithmetic
5131 and returning a pseudo-register containing the value.
5132
5133 The returned value may be a REG, SUBREG, MEM or constant. */
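/* A minimal illustration with hypothetical rtl: given
       (plus:SI (reg:SI 100) (const_int 4))
   force_operand emits an add of register 100 and the constant into a
   pseudo register and returns that pseudo, so the caller sees a plain
   register rather than an arithmetic expression.  */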
5134
5135 rtx
5136 force_operand (value, target)
5137 rtx value, target;
5138 {
5139 register optab binoptab = 0;
5140 /* Use a temporary to force order of execution of calls to
5141 `force_operand'. */
5142 rtx tmp;
5143 register rtx op2;
5144 /* Use subtarget as the target for operand 0 of a binary operation. */
5145 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5146
5147 /* Check for a PIC address load. */
5148 if (flag_pic
5149 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5150 && XEXP (value, 0) == pic_offset_table_rtx
5151 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5152 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5153 || GET_CODE (XEXP (value, 1)) == CONST))
5154 {
5155 if (!subtarget)
5156 subtarget = gen_reg_rtx (GET_MODE (value));
5157 emit_move_insn (subtarget, value);
5158 return subtarget;
5159 }
5160
5161 if (GET_CODE (value) == PLUS)
5162 binoptab = add_optab;
5163 else if (GET_CODE (value) == MINUS)
5164 binoptab = sub_optab;
5165 else if (GET_CODE (value) == MULT)
5166 {
5167 op2 = XEXP (value, 1);
5168 if (!CONSTANT_P (op2)
5169 && !(GET_CODE (op2) == REG && op2 != subtarget))
5170 subtarget = 0;
5171 tmp = force_operand (XEXP (value, 0), subtarget);
5172 return expand_mult (GET_MODE (value), tmp,
5173 force_operand (op2, NULL_RTX),
5174 target, 0);
5175 }
5176
5177 if (binoptab)
5178 {
5179 op2 = XEXP (value, 1);
5180 if (!CONSTANT_P (op2)
5181 && !(GET_CODE (op2) == REG && op2 != subtarget))
5182 subtarget = 0;
5183 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5184 {
5185 binoptab = add_optab;
5186 op2 = negate_rtx (GET_MODE (value), op2);
5187 }
5188
5189 /* Check for an addition with OP2 a constant integer and our first
5190 operand a PLUS of a virtual register and something else. In that
5191 case, we want to emit the sum of the virtual register and the
5192 constant first and then add the other value. This allows virtual
5193 register instantiation to simply modify the constant rather than
5194 creating another one around this addition. */
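/* E.g. (hypothetical): for
       (plus (plus (reg virtual-stack-vars) (reg 101)) (const_int 8))
   the constant 8 is combined with the virtual register first, so
   instantiation can fold it into the frame offset, and (reg 101)
   is added afterwards.  */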
5195 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5196 && GET_CODE (XEXP (value, 0)) == PLUS
5197 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5198 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5199 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5200 {
5201 rtx temp = expand_binop (GET_MODE (value), binoptab,
5202 XEXP (XEXP (value, 0), 0), op2,
5203 subtarget, 0, OPTAB_LIB_WIDEN);
5204 return expand_binop (GET_MODE (value), binoptab, temp,
5205 force_operand (XEXP (XEXP (value, 0), 1), 0),
5206 target, 0, OPTAB_LIB_WIDEN);
5207 }
5208
5209 tmp = force_operand (XEXP (value, 0), subtarget);
5210 return expand_binop (GET_MODE (value), binoptab, tmp,
5211 force_operand (op2, NULL_RTX),
5212 target, 0, OPTAB_LIB_WIDEN);
5213 /* We give UNSIGNEDP = 0 to expand_binop
5214 because the only operations we are expanding here are signed ones. */
5215 }
5216 return value;
5217 }
5218 \f
5219 /* Subroutine of expand_expr:
5220 save the non-copied parts (LIST) of an expr (LHS), and return a list
5221 which can restore these values to their previous values,
5222 should something modify their storage. */
5223
5224 static tree
5225 save_noncopied_parts (lhs, list)
5226 tree lhs;
5227 tree list;
5228 {
5229 tree tail;
5230 tree parts = 0;
5231
5232 for (tail = list; tail; tail = TREE_CHAIN (tail))
5233 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5234 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5235 else
5236 {
5237 tree part = TREE_VALUE (tail);
5238 tree part_type = TREE_TYPE (part);
5239 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5240 rtx target = assign_temp (part_type, 0, 1, 1);
5241 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5242 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5243 parts = tree_cons (to_be_saved,
5244 build (RTL_EXPR, part_type, NULL_TREE,
5245 (tree) target),
5246 parts);
5247 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5248 }
5249 return parts;
5250 }
5251
5252 /* Subroutine of expand_expr:
5253 record the non-copied parts (LIST) of an expr (LHS), and return a list
5254 which specifies the initial values of these parts. */
5255
5256 static tree
5257 init_noncopied_parts (lhs, list)
5258 tree lhs;
5259 tree list;
5260 {
5261 tree tail;
5262 tree parts = 0;
5263
5264 for (tail = list; tail; tail = TREE_CHAIN (tail))
5265 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5266 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5267 else if (TREE_PURPOSE (tail))
5268 {
5269 tree part = TREE_VALUE (tail);
5270 tree part_type = TREE_TYPE (part);
5271 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5272 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5273 }
5274 return parts;
5275 }
5276
5277 /* Subroutine of expand_expr: return nonzero iff there is no way that
5278 EXP can reference X, which is being modified. TOP_P is nonzero if this
5279 call is going to be used to determine whether we need a temporary
5280 for EXP, as opposed to a recursive call to this function.
5281
5282 It is always safe for this routine to return zero since it merely
5283 searches for optimization opportunities. */
5284
5285 static int
5286 safe_from_p (x, exp, top_p)
5287 rtx x;
5288 tree exp;
5289 int top_p;
5290 {
5291 rtx exp_rtl = 0;
5292 int i, nops;
5293 static int save_expr_count;
5294 static int save_expr_size = 0;
5295 static tree *save_expr_rewritten;
5296 static tree save_expr_trees[256];
5297
5298 if (x == 0
5299 /* If EXP has varying size, we MUST use a target since we currently
5300 have no way of allocating temporaries of variable size
5301 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5302 So we assume here that something at a higher level has prevented a
5303 clash. This is somewhat bogus, but the best we can do. Only
5304 do this when X is BLKmode and when we are at the top level. */
5305 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5306 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5307 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5308 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5309 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5310 != INTEGER_CST)
5311 && GET_MODE (x) == BLKmode))
5312 return 1;
5313
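/* At the top level, set up the table that records any SAVE_EXPRs the
   SAVE_EXPR case below temporarily rewrites to ERROR_MARK, do the real scan
   recursively, and then restore their codes before returning.  */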
5314 if (top_p && save_expr_size == 0)
5315 {
5316 int rtn;
5317
5318 save_expr_count = 0;
5319 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5320 save_expr_rewritten = &save_expr_trees[0];
5321
5322 rtn = safe_from_p (x, exp, 1);
5323
5324 for (i = 0; i < save_expr_count; ++i)
5325 {
5326 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5327 abort ();
5328 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5329 }
5330
5331 save_expr_size = 0;
5332
5333 return rtn;
5334 }
5335
5336 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5337 find the underlying pseudo. */
5338 if (GET_CODE (x) == SUBREG)
5339 {
5340 x = SUBREG_REG (x);
5341 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5342 return 0;
5343 }
5344
5345 /* If X is a location in the outgoing argument area, it is always safe. */
5346 if (GET_CODE (x) == MEM
5347 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5348 || (GET_CODE (XEXP (x, 0)) == PLUS
5349 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5350 return 1;
5351
5352 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5353 {
5354 case 'd':
5355 exp_rtl = DECL_RTL (exp);
5356 break;
5357
5358 case 'c':
5359 return 1;
5360
5361 case 'x':
5362 if (TREE_CODE (exp) == TREE_LIST)
5363 return ((TREE_VALUE (exp) == 0
5364 || safe_from_p (x, TREE_VALUE (exp), 0))
5365 && (TREE_CHAIN (exp) == 0
5366 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5367 else if (TREE_CODE (exp) == ERROR_MARK)
5368 return 1; /* An already-visited SAVE_EXPR? */
5369 else
5370 return 0;
5371
5372 case '1':
5373 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5374
5375 case '2':
5376 case '<':
5377 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5378 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5379
5380 case 'e':
5381 case 'r':
5382 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5383 the expression. If it is set, we conflict iff we are that rtx or
5384 both are in memory. Otherwise, we check all operands of the
5385 expression recursively. */
5386
5387 switch (TREE_CODE (exp))
5388 {
5389 case ADDR_EXPR:
5390 return (staticp (TREE_OPERAND (exp, 0))
5391 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5392 || TREE_STATIC (exp));
5393
5394 case INDIRECT_REF:
5395 if (GET_CODE (x) == MEM)
5396 return 0;
5397 break;
5398
5399 case CALL_EXPR:
5400 exp_rtl = CALL_EXPR_RTL (exp);
5401 if (exp_rtl == 0)
5402 {
5403 /* Assume that the call will clobber all hard registers and
5404 all of memory. */
5405 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5406 || GET_CODE (x) == MEM)
5407 return 0;
5408 }
5409
5410 break;
5411
5412 case RTL_EXPR:
5413 /* If a sequence exists, we would have to scan every instruction
5414 in the sequence to see if it was safe. This is probably not
5415 worthwhile. */
5416 if (RTL_EXPR_SEQUENCE (exp))
5417 return 0;
5418
5419 exp_rtl = RTL_EXPR_RTL (exp);
5420 break;
5421
5422 case WITH_CLEANUP_EXPR:
5423 exp_rtl = RTL_EXPR_RTL (exp);
5424 break;
5425
5426 case CLEANUP_POINT_EXPR:
5427 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5428
5429 case SAVE_EXPR:
5430 exp_rtl = SAVE_EXPR_RTL (exp);
5431 if (exp_rtl)
5432 break;
5433
5434 /* This SAVE_EXPR might appear many times in the top-level
5435 safe_from_p() expression, and if it has a complex
5436 subexpression, examining it multiple times could result
5437 in a combinatorial explosion. E.g. on an Alpha
5438 running at least 200MHz, a Fortran test case compiled with
5439 optimization took about 28 minutes to compile -- even though
5440 it was only a few lines long, and the complicated line causing
5441 so much time to be spent in the earlier version of safe_from_p()
5442 had only 293 or so unique nodes.
5443
5444 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5445 where it is so we can turn it back in the top-level safe_from_p()
5446 when we're done. */
5447
5448 /* For now, don't bother re-sizing the array. */
5449 if (save_expr_count >= save_expr_size)
5450 return 0;
5451 save_expr_rewritten[save_expr_count++] = exp;
5452
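/* Scan the operands with EXP temporarily turned into an ERROR_MARK so that
   a nested reference to this same SAVE_EXPR is treated as already visited;
   leave it as an ERROR_MARK afterwards so later references in this walk are
   short-circuited too.  The top-level call restores the SAVE_EXPR code.  */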
5453 nops = tree_code_length[(int) SAVE_EXPR];
5454 for (i = 0; i < nops; i++)
5455 {
5456 tree operand = TREE_OPERAND (exp, i);
5457 if (operand == NULL_TREE)
5458 continue;
5459 TREE_SET_CODE (exp, ERROR_MARK);
5460 if (!safe_from_p (x, operand, 0))
5461 return 0;
5462 TREE_SET_CODE (exp, SAVE_EXPR);
5463 }
5464 TREE_SET_CODE (exp, ERROR_MARK);
5465 return 1;
5466
5467 case BIND_EXPR:
5468 /* The only operand we look at is operand 1. The rest aren't
5469 part of the expression. */
5470 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5471
5472 case METHOD_CALL_EXPR:
5473 /* This takes a rtx argument, but shouldn't appear here. */
5474 abort ();
5475
5476 default:
5477 break;
5478 }
5479
5480 /* If we have an rtx, we do not need to scan our operands. */
5481 if (exp_rtl)
5482 break;
5483
5484 nops = tree_code_length[(int) TREE_CODE (exp)];
5485 for (i = 0; i < nops; i++)
5486 if (TREE_OPERAND (exp, i) != 0
5487 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5488 return 0;
5489 }
5490
5491 /* If we have an rtl, find any enclosed object. Then see if we conflict
5492 with it. */
5493 if (exp_rtl)
5494 {
5495 if (GET_CODE (exp_rtl) == SUBREG)
5496 {
5497 exp_rtl = SUBREG_REG (exp_rtl);
5498 if (GET_CODE (exp_rtl) == REG
5499 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5500 return 0;
5501 }
5502
5503 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5504 are memory and EXP is not readonly. */
5505 return ! (rtx_equal_p (x, exp_rtl)
5506 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5507 && ! TREE_READONLY (exp)));
5508 }
5509
5510 /* If we reach here, it is safe. */
5511 return 1;
5512 }
5513
5514 /* Subroutine of expand_expr: return nonzero iff EXP is an
5515 expression whose type is statically determinable. */
5516
5517 static int
5518 fixed_type_p (exp)
5519 tree exp;
5520 {
5521 if (TREE_CODE (exp) == PARM_DECL
5522 || TREE_CODE (exp) == VAR_DECL
5523 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5524 || TREE_CODE (exp) == COMPONENT_REF
5525 || TREE_CODE (exp) == ARRAY_REF)
5526 return 1;
5527 return 0;
5528 }
5529
5530 /* Subroutine of expand_expr: return rtx if EXP is a
5531 variable or parameter; else return 0. */
5532
5533 static rtx
5534 var_rtx (exp)
5535 tree exp;
5536 {
5537 STRIP_NOPS (exp);
5538 switch (TREE_CODE (exp))
5539 {
5540 case PARM_DECL:
5541 case VAR_DECL:
5542 return DECL_RTL (exp);
5543 default:
5544 return 0;
5545 }
5546 }
5547
5548 #ifdef MAX_INTEGER_COMPUTATION_MODE
5549 void
5550 check_max_integer_computation_mode (exp)
5551 tree exp;
5552 {
5553 enum tree_code code;
5554 enum machine_mode mode;
5555
5556 /* Strip any NOPs that don't change the mode. */
5557 STRIP_NOPS (exp);
5558 code = TREE_CODE (exp);
5559
5560 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5561 if (code == NOP_EXPR
5562 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5563 return;
5564
5565 /* First check the type of the overall operation. We need only look at
5566 unary, binary and relational operations. */
5567 if (TREE_CODE_CLASS (code) == '1'
5568 || TREE_CODE_CLASS (code) == '2'
5569 || TREE_CODE_CLASS (code) == '<')
5570 {
5571 mode = TYPE_MODE (TREE_TYPE (exp));
5572 if (GET_MODE_CLASS (mode) == MODE_INT
5573 && mode > MAX_INTEGER_COMPUTATION_MODE)
5574 fatal ("unsupported wide integer operation");
5575 }
5576
5577 /* Check operand of a unary op. */
5578 if (TREE_CODE_CLASS (code) == '1')
5579 {
5580 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5581 if (GET_MODE_CLASS (mode) == MODE_INT
5582 && mode > MAX_INTEGER_COMPUTATION_MODE)
5583 fatal ("unsupported wide integer operation");
5584 }
5585
5586 /* Check operands of a binary/comparison op. */
5587 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5588 {
5589 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5590 if (GET_MODE_CLASS (mode) == MODE_INT
5591 && mode > MAX_INTEGER_COMPUTATION_MODE)
5592 fatal ("unsupported wide integer operation");
5593
5594 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5595 if (GET_MODE_CLASS (mode) == MODE_INT
5596 && mode > MAX_INTEGER_COMPUTATION_MODE)
5597 fatal ("unsupported wide integer operation");
5598 }
5599 }
5600 #endif
5601
5602 \f
5603 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5604 has any readonly fields. If any of the fields have types that
5605 contain readonly fields, return true as well. */
5606
5607 static int
5608 readonly_fields_p (type)
5609 tree type;
5610 {
5611 tree field;
5612
5613 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5614 if (TREE_CODE (field) == FIELD_DECL
5615 && (TREE_READONLY (field)
5616 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5617 && readonly_fields_p (TREE_TYPE (field)))))
5618 return 1;
5619
5620 return 0;
5621 }
5622 \f
5623 /* expand_expr: generate code for computing expression EXP.
5624 An rtx for the computed value is returned. The value is never null.
5625 In the case of a void EXP, const0_rtx is returned.
5626
5627 The value may be stored in TARGET if TARGET is nonzero.
5628 TARGET is just a suggestion; callers must assume that
5629 the rtx returned may not be the same as TARGET.
5630
5631 If TARGET is CONST0_RTX, it means that the value will be ignored.
5632
5633 If TMODE is not VOIDmode, it suggests generating the
5634 result in mode TMODE. But this is done only when convenient.
5635 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5636 TMODE is just a suggestion; callers must assume that
5637 the rtx returned may not have mode TMODE.
5638
5639 Note that TARGET may have neither TMODE nor MODE. In that case, it
5640 probably will not be used.
5641
5642 If MODIFIER is EXPAND_SUM then when EXP is an addition
5643 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5644 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5645 products as above, or REG or MEM, or constant.
5646 Ordinarily in such cases we would output mul or add instructions
5647 and then return a pseudo reg containing the sum.
5648
5649 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5650 it also marks a label as absolutely required (it can't be dead).
5651 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5652 This is used for outputting expressions used in initializers.
5653
5654 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5655 with a constant address even if that address is not normally legitimate.
5656 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
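
/* A typical caller that simply wants the value in its natural mode can pass
   no preferences at all, e.g.

	rtx x = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   (an illustrative call, not copied from any particular caller).  */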
5657
5658 rtx
5659 expand_expr (exp, target, tmode, modifier)
5660 register tree exp;
5661 rtx target;
5662 enum machine_mode tmode;
5663 enum expand_modifier modifier;
5664 {
5665 register rtx op0, op1, temp;
5666 tree type = TREE_TYPE (exp);
5667 int unsignedp = TREE_UNSIGNED (type);
5668 register enum machine_mode mode;
5669 register enum tree_code code = TREE_CODE (exp);
5670 optab this_optab;
5671 rtx subtarget, original_target;
5672 int ignore;
5673 tree context;
5674 /* Used by check-memory-usage to make modifier read only. */
5675 enum expand_modifier ro_modifier;
5676
5677 /* Handle ERROR_MARK before anybody tries to access its type. */
5678 if (TREE_CODE (exp) == ERROR_MARK)
5679 {
5680 op0 = CONST0_RTX (tmode);
5681 if (op0 != 0)
5682 return op0;
5683 return const0_rtx;
5684 }
5685
5686 mode = TYPE_MODE (type);
5687 /* Use subtarget as the target for operand 0 of a binary operation. */
5688 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5689 original_target = target;
5690 ignore = (target == const0_rtx
5691 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5692 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5693 || code == COND_EXPR)
5694 && TREE_CODE (type) == VOID_TYPE));
5695
5696 /* Make a read-only version of the modifier. */
5697 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5698 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5699 ro_modifier = modifier;
5700 else
5701 ro_modifier = EXPAND_NORMAL;
5702
5703 /* Don't use hard regs as subtargets, because the combiner
5704 can only handle pseudo regs. */
5705 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5706 subtarget = 0;
5707 /* Avoid subtargets inside loops,
5708 since they hide some invariant expressions. */
5709 if (preserve_subexpressions_p ())
5710 subtarget = 0;
5711
5712 /* If we are going to ignore this result, we need only do something
5713 if there is a side-effect somewhere in the expression. If there
5714 is, short-circuit the most common cases here. Note that we must
5715 not call expand_expr with anything but const0_rtx in case this
5716 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5717
5718 if (ignore)
5719 {
5720 if (! TREE_SIDE_EFFECTS (exp))
5721 return const0_rtx;
5722
5723 /* Ensure we reference a volatile object even if value is ignored, but
5724 don't do this if all we are doing is taking its address. */
5725 if (TREE_THIS_VOLATILE (exp)
5726 && TREE_CODE (exp) != FUNCTION_DECL
5727 && mode != VOIDmode && mode != BLKmode
5728 && modifier != EXPAND_CONST_ADDRESS)
5729 {
5730 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5731 if (GET_CODE (temp) == MEM)
5732 temp = copy_to_reg (temp);
5733 return const0_rtx;
5734 }
5735
5736 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5737 || code == INDIRECT_REF || code == BUFFER_REF)
5738 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5739 VOIDmode, ro_modifier);
5740 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5741 || code == ARRAY_REF)
5742 {
5743 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5744 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5745 return const0_rtx;
5746 }
5747 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5748 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5749 /* If the second operand has no side effects, just evaluate
5750 the first. */
5751 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5752 VOIDmode, ro_modifier);
5753 else if (code == BIT_FIELD_REF)
5754 {
5755 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5756 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5757 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5758 return const0_rtx;
5759 }
5760 ;
5761 target = 0;
5762 }
5763
5764 #ifdef MAX_INTEGER_COMPUTATION_MODE
5765 /* Only check stuff here if the mode we want is different from the mode
5766 of the expression; if it's the same, check_max_integer_computation_mode
5767 will handle it. Do we really need to check this stuff at all? */
5768
5769 if (target
5770 && GET_MODE (target) != mode
5771 && TREE_CODE (exp) != INTEGER_CST
5772 && TREE_CODE (exp) != PARM_DECL
5773 && TREE_CODE (exp) != ARRAY_REF
5774 && TREE_CODE (exp) != COMPONENT_REF
5775 && TREE_CODE (exp) != BIT_FIELD_REF
5776 && TREE_CODE (exp) != INDIRECT_REF
5777 && TREE_CODE (exp) != CALL_EXPR
5778 && TREE_CODE (exp) != VAR_DECL
5779 && TREE_CODE (exp) != RTL_EXPR)
5780 {
5781 enum machine_mode mode = GET_MODE (target);
5782
5783 if (GET_MODE_CLASS (mode) == MODE_INT
5784 && mode > MAX_INTEGER_COMPUTATION_MODE)
5785 fatal ("unsupported wide integer operation");
5786 }
5787
5788 if (tmode != mode
5789 && TREE_CODE (exp) != INTEGER_CST
5790 && TREE_CODE (exp) != PARM_DECL
5791 && TREE_CODE (exp) != ARRAY_REF
5792 && TREE_CODE (exp) != COMPONENT_REF
5793 && TREE_CODE (exp) != BIT_FIELD_REF
5794 && TREE_CODE (exp) != INDIRECT_REF
5795 && TREE_CODE (exp) != VAR_DECL
5796 && TREE_CODE (exp) != CALL_EXPR
5797 && TREE_CODE (exp) != RTL_EXPR
5798 && GET_MODE_CLASS (tmode) == MODE_INT
5799 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5800 fatal ("unsupported wide integer operation");
5801
5802 check_max_integer_computation_mode (exp);
5803 #endif
5804
5805 /* If will do cse, generate all results into pseudo registers
5806 since 1) that allows cse to find more things
5807 and 2) otherwise cse could produce an insn the machine
5808 cannot support. */
5809
5810 if (! cse_not_expected && mode != BLKmode && target
5811 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5812 target = subtarget;
5813
5814 switch (code)
5815 {
5816 case LABEL_DECL:
5817 {
5818 tree function = decl_function_context (exp);
5819 /* Handle using a label in a containing function. */
5820 if (function != current_function_decl
5821 && function != inline_function_decl && function != 0)
5822 {
5823 struct function *p = find_function_data (function);
5824 /* Allocate in the memory associated with the function
5825 that the label is in. */
5826 push_obstacks (p->function_obstack,
5827 p->function_maybepermanent_obstack);
5828
5829 p->expr->x_forced_labels
5830 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5831 p->expr->x_forced_labels);
5832 pop_obstacks ();
5833 }
5834 else
5835 {
5836 if (modifier == EXPAND_INITIALIZER)
5837 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5838 label_rtx (exp),
5839 forced_labels);
5840 }
5841
5842 temp = gen_rtx_MEM (FUNCTION_MODE,
5843 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5844 if (function != current_function_decl
5845 && function != inline_function_decl && function != 0)
5846 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5847 return temp;
5848 }
5849
5850 case PARM_DECL:
5851 if (DECL_RTL (exp) == 0)
5852 {
5853 error_with_decl (exp, "prior parameter's size depends on `%s'");
5854 return CONST0_RTX (mode);
5855 }
5856
5857 /* ... fall through ... */
5858
5859 case VAR_DECL:
5860 /* If a static var's type was incomplete when the decl was written,
5861 but the type is complete now, lay out the decl now. */
5862 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5863 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5864 {
5865 push_obstacks_nochange ();
5866 end_temporary_allocation ();
5867 layout_decl (exp, 0);
5868 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5869 pop_obstacks ();
5870 }
5871
5872 /* Although static-storage variables start off initialized, according to
5873 ANSI C, a memcpy could overwrite them with uninitialized values. So
5874 we check them too. This also lets us check for read-only variables
5875 accessed via a non-const declaration, in case it won't be detected
5876 any other way (e.g., in an embedded system or OS kernel without
5877 memory protection).
5878
5879 Aggregates are not checked here; they're handled elsewhere. */
5880 if (cfun && current_function_check_memory_usage
5881 && code == VAR_DECL
5882 && GET_CODE (DECL_RTL (exp)) == MEM
5883 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5884 {
5885 enum memory_use_mode memory_usage;
5886 memory_usage = get_memory_usage_from_modifier (modifier);
5887
5888 if (memory_usage != MEMORY_USE_DONT)
5889 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5890 XEXP (DECL_RTL (exp), 0), Pmode,
5891 GEN_INT (int_size_in_bytes (type)),
5892 TYPE_MODE (sizetype),
5893 GEN_INT (memory_usage),
5894 TYPE_MODE (integer_type_node));
5895 }
5896
5897 /* ... fall through ... */
5898
5899 case FUNCTION_DECL:
5900 case RESULT_DECL:
5901 if (DECL_RTL (exp) == 0)
5902 abort ();
5903
5904 /* Ensure the variable is marked as used even if it doesn't go through
5905 a parser. If it hasn't been used yet, write out an external
5906 definition. */
5907 if (! TREE_USED (exp))
5908 {
5909 assemble_external (exp);
5910 TREE_USED (exp) = 1;
5911 }
5912
5913 /* Show we haven't gotten RTL for this yet. */
5914 temp = 0;
5915
5916 /* Handle variables inherited from containing functions. */
5917 context = decl_function_context (exp);
5918
5919 /* We treat inline_function_decl as an alias for the current function
5920 because that is the inline function whose vars, types, etc.
5921 are being merged into the current function.
5922 See expand_inline_function. */
5923
5924 if (context != 0 && context != current_function_decl
5925 && context != inline_function_decl
5926 /* If var is static, we don't need a static chain to access it. */
5927 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5928 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5929 {
5930 rtx addr;
5931
5932 /* Mark as non-local and addressable. */
5933 DECL_NONLOCAL (exp) = 1;
5934 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5935 abort ();
5936 mark_addressable (exp);
5937 if (GET_CODE (DECL_RTL (exp)) != MEM)
5938 abort ();
5939 addr = XEXP (DECL_RTL (exp), 0);
5940 if (GET_CODE (addr) == MEM)
5941 addr = gen_rtx_MEM (Pmode,
5942 fix_lexical_addr (XEXP (addr, 0), exp));
5943 else
5944 addr = fix_lexical_addr (addr, exp);
5945 temp = change_address (DECL_RTL (exp), mode, addr);
5946 }
5947
5948 /* This is the case of an array whose size is to be determined
5949 from its initializer, while the initializer is still being parsed.
5950 See expand_decl. */
5951
5952 else if (GET_CODE (DECL_RTL (exp)) == MEM
5953 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5954 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5955 XEXP (DECL_RTL (exp), 0));
5956
5957 /* If DECL_RTL is memory, we are in the normal case and either
5958 the address is not valid or it is not a register and -fforce-addr
5959 is specified, get the address into a register. */
5960
5961 else if (GET_CODE (DECL_RTL (exp)) == MEM
5962 && modifier != EXPAND_CONST_ADDRESS
5963 && modifier != EXPAND_SUM
5964 && modifier != EXPAND_INITIALIZER
5965 && (! memory_address_p (DECL_MODE (exp),
5966 XEXP (DECL_RTL (exp), 0))
5967 || (flag_force_addr
5968 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5969 temp = change_address (DECL_RTL (exp), VOIDmode,
5970 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5971
5972 /* If we got something, return it. But first, set the alignment
5973 if the address is a register. */
5974 if (temp != 0)
5975 {
5976 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5977 mark_reg_pointer (XEXP (temp, 0),
5978 DECL_ALIGN (exp) / BITS_PER_UNIT);
5979
5980 return temp;
5981 }
5982
5983 /* If the mode of DECL_RTL does not match that of the decl, it
5984 must be a promoted value. We return a SUBREG of the wanted mode,
5985 but mark it so that we know that it was already extended. */
5986
5987 if (GET_CODE (DECL_RTL (exp)) == REG
5988 && GET_MODE (DECL_RTL (exp)) != mode)
5989 {
5990 /* Get the signedness used for this variable. Ensure we get the
5991 same mode we got when the variable was declared. */
5992 if (GET_MODE (DECL_RTL (exp))
5993 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5994 abort ();
5995
5996 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5997 SUBREG_PROMOTED_VAR_P (temp) = 1;
5998 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5999 return temp;
6000 }
6001
6002 return DECL_RTL (exp);
6003
6004 case INTEGER_CST:
6005 return immed_double_const (TREE_INT_CST_LOW (exp),
6006 TREE_INT_CST_HIGH (exp),
6007 mode);
6008
6009 case CONST_DECL:
6010 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6011 EXPAND_MEMORY_USE_BAD);
6012
6013 case REAL_CST:
6014 /* If optimized, generate immediate CONST_DOUBLE
6015 which will be turned into memory by reload if necessary.
6016
6017 We used to force a register so that loop.c could see it. But
6018 this does not allow gen_* patterns to perform optimizations with
6019 the constants. It also produces two insns in cases like "x = 1.0;".
6020 On most machines, floating-point constants are not permitted in
6021 many insns, so we'd end up copying it to a register in any case.
6022
6023 Now, we do the copying in expand_binop, if appropriate. */
6024 return immed_real_const (exp);
6025
6026 case COMPLEX_CST:
6027 case STRING_CST:
6028 if (! TREE_CST_RTL (exp))
6029 output_constant_def (exp);
6030
6031 /* TREE_CST_RTL probably contains a constant address.
6032 On RISC machines where a constant address isn't valid,
6033 make some insns to get that address into a register. */
6034 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6035 && modifier != EXPAND_CONST_ADDRESS
6036 && modifier != EXPAND_INITIALIZER
6037 && modifier != EXPAND_SUM
6038 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6039 || (flag_force_addr
6040 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6041 return change_address (TREE_CST_RTL (exp), VOIDmode,
6042 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6043 return TREE_CST_RTL (exp);
6044
6045 case EXPR_WITH_FILE_LOCATION:
6046 {
6047 rtx to_return;
6048 char *saved_input_filename = input_filename;
6049 int saved_lineno = lineno;
6050 input_filename = EXPR_WFL_FILENAME (exp);
6051 lineno = EXPR_WFL_LINENO (exp);
6052 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6053 emit_line_note (input_filename, lineno);
6054 /* Possibly avoid switching back and forth here.  */
6055 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6056 input_filename = saved_input_filename;
6057 lineno = saved_lineno;
6058 return to_return;
6059 }
6060
6061 case SAVE_EXPR:
6062 context = decl_function_context (exp);
6063
6064 /* If this SAVE_EXPR was at global context, assume we are an
6065 initialization function and move it into our context. */
6066 if (context == 0)
6067 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6068
6069 /* We treat inline_function_decl as an alias for the current function
6070 because that is the inline function whose vars, types, etc.
6071 are being merged into the current function.
6072 See expand_inline_function. */
6073 if (context == current_function_decl || context == inline_function_decl)
6074 context = 0;
6075
6076 /* If this is non-local, handle it. */
6077 if (context)
6078 {
6079 /* The following call just exists to abort if the context is
6080 not of a containing function. */
6081 find_function_data (context);
6082
6083 temp = SAVE_EXPR_RTL (exp);
6084 if (temp && GET_CODE (temp) == REG)
6085 {
6086 put_var_into_stack (exp);
6087 temp = SAVE_EXPR_RTL (exp);
6088 }
6089 if (temp == 0 || GET_CODE (temp) != MEM)
6090 abort ();
6091 return change_address (temp, mode,
6092 fix_lexical_addr (XEXP (temp, 0), exp));
6093 }
6094 if (SAVE_EXPR_RTL (exp) == 0)
6095 {
6096 if (mode == VOIDmode)
6097 temp = const0_rtx;
6098 else
6099 temp = assign_temp (type, 3, 0, 0);
6100
6101 SAVE_EXPR_RTL (exp) = temp;
6102 if (!optimize && GET_CODE (temp) == REG)
6103 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6104 save_expr_regs);
6105
6106 /* If the mode of TEMP does not match that of the expression, it
6107 must be a promoted value. We pass store_expr a SUBREG of the
6108 wanted mode but mark it so that we know that it was already
6109 extended. Note that `unsignedp' was modified above in
6110 this case. */
6111
6112 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6113 {
6114 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6115 SUBREG_PROMOTED_VAR_P (temp) = 1;
6116 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6117 }
6118
6119 if (temp == const0_rtx)
6120 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6121 EXPAND_MEMORY_USE_BAD);
6122 else
6123 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6124
6125 TREE_USED (exp) = 1;
6126 }
6127
6128 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6129 must be a promoted value. We return a SUBREG of the wanted mode,
6130 but mark it so that we know that it was already extended. */
6131
6132 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6133 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6134 {
6135 /* Compute the signedness and make the proper SUBREG. */
6136 promote_mode (type, mode, &unsignedp, 0);
6137 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6138 SUBREG_PROMOTED_VAR_P (temp) = 1;
6139 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6140 return temp;
6141 }
6142
6143 return SAVE_EXPR_RTL (exp);
6144
6145 case UNSAVE_EXPR:
6146 {
6147 rtx temp;
6148 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6149 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6150 return temp;
6151 }
6152
6153 case PLACEHOLDER_EXPR:
6154 {
6155 tree placeholder_expr;
6156
6157 /* If there is an object at the head of the placeholder list,
6158 see if any object in it is of type TYPE or is a pointer to such a
6159 type. For further information, see tree.def. */
6160 for (placeholder_expr = placeholder_list;
6161 placeholder_expr != 0;
6162 placeholder_expr = TREE_CHAIN (placeholder_expr))
6163 {
6164 tree need_type = TYPE_MAIN_VARIANT (type);
6165 tree object = 0;
6166 tree old_list = placeholder_list;
6167 tree elt;
6168
6169 /* Find the outermost reference that is of the type we want.
6170 If none, see if any object has a type that is a pointer to
6171 the type we want. */
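/* The update expressions of the two loops below walk inward: through the
   second operand of a COMPOUND_EXPR or COND_EXPR, and through the first
   operand of any reference, unary, binary or other expression node,
   stopping when neither applies.  */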
6172 for (elt = TREE_PURPOSE (placeholder_expr);
6173 elt != 0 && object == 0;
6174 elt
6175 = ((TREE_CODE (elt) == COMPOUND_EXPR
6176 || TREE_CODE (elt) == COND_EXPR)
6177 ? TREE_OPERAND (elt, 1)
6178 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6179 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6180 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6181 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6182 ? TREE_OPERAND (elt, 0) : 0))
6183 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6184 object = elt;
6185
6186 for (elt = TREE_PURPOSE (placeholder_expr);
6187 elt != 0 && object == 0;
6188 elt
6189 = ((TREE_CODE (elt) == COMPOUND_EXPR
6190 || TREE_CODE (elt) == COND_EXPR)
6191 ? TREE_OPERAND (elt, 1)
6192 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6193 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6194 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6195 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6196 ? TREE_OPERAND (elt, 0) : 0))
6197 if (POINTER_TYPE_P (TREE_TYPE (elt))
6198 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6199 == need_type))
6200 object = build1 (INDIRECT_REF, need_type, elt);
6201
6202 if (object != 0)
6203 {
6204 /* Expand this object, skipping the list entries before the
6205 one in which it was found, in case it is itself a PLACEHOLDER_EXPR.
6206 In that case, we want to translate it using subsequent
6207 entries. */
6208 placeholder_list = TREE_CHAIN (placeholder_expr);
6209 temp = expand_expr (object, original_target, tmode,
6210 ro_modifier);
6211 placeholder_list = old_list;
6212 return temp;
6213 }
6214 }
6215 }
6216
6217 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6218 abort ();
6219
6220 case WITH_RECORD_EXPR:
6221 /* Put the object on the placeholder list, expand our first operand,
6222 and pop the list. */
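/* Any PLACEHOLDER_EXPR within operand 0 is then resolved against operand 1
   by the search in the PLACEHOLDER_EXPR case above.  */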
6223 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6224 placeholder_list);
6225 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6226 tmode, ro_modifier);
6227 placeholder_list = TREE_CHAIN (placeholder_list);
6228 return target;
6229
6230 case GOTO_EXPR:
6231 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6232 expand_goto (TREE_OPERAND (exp, 0));
6233 else
6234 expand_computed_goto (TREE_OPERAND (exp, 0));
6235 return const0_rtx;
6236
6237 case EXIT_EXPR:
6238 expand_exit_loop_if_false (NULL_PTR,
6239 invert_truthvalue (TREE_OPERAND (exp, 0)));
6240 return const0_rtx;
6241
6242 case LABELED_BLOCK_EXPR:
6243 if (LABELED_BLOCK_BODY (exp))
6244 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6245 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6246 return const0_rtx;
6247
6248 case EXIT_BLOCK_EXPR:
6249 if (EXIT_BLOCK_RETURN (exp))
6250 sorry ("returned value in block_exit_expr");
6251 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6252 return const0_rtx;
6253
6254 case LOOP_EXPR:
6255 push_temp_slots ();
6256 expand_start_loop (1);
6257 expand_expr_stmt (TREE_OPERAND (exp, 0));
6258 expand_end_loop ();
6259 pop_temp_slots ();
6260
6261 return const0_rtx;
6262
6263 case BIND_EXPR:
6264 {
6265 tree vars = TREE_OPERAND (exp, 0);
6266 int vars_need_expansion = 0;
6267
6268 /* Need to open a binding contour here because
6269 if there are any cleanups they must be contained here. */
6270 expand_start_bindings (2);
6271
6272 /* Mark the corresponding BLOCK for output in its proper place. */
6273 if (TREE_OPERAND (exp, 2) != 0
6274 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6275 insert_block (TREE_OPERAND (exp, 2));
6276
6277 /* If VARS have not yet been expanded, expand them now. */
6278 while (vars)
6279 {
6280 if (DECL_RTL (vars) == 0)
6281 {
6282 vars_need_expansion = 1;
6283 expand_decl (vars);
6284 }
6285 expand_decl_init (vars);
6286 vars = TREE_CHAIN (vars);
6287 }
6288
6289 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6290
6291 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6292
6293 return temp;
6294 }
6295
6296 case RTL_EXPR:
6297 if (RTL_EXPR_SEQUENCE (exp))
6298 {
6299 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6300 abort ();
6301 emit_insns (RTL_EXPR_SEQUENCE (exp));
6302 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6303 }
6304 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6305 free_temps_for_rtl_expr (exp);
6306 return RTL_EXPR_RTL (exp);
6307
6308 case CONSTRUCTOR:
6309 /* If we don't need the result, just ensure we evaluate any
6310 subexpressions. */
6311 if (ignore)
6312 {
6313 tree elt;
6314 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6315 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6316 EXPAND_MEMORY_USE_BAD);
6317 return const0_rtx;
6318 }
6319
6320 /* All elts simple constants => refer to a constant in memory. But
6321 if this is a non-BLKmode mode, let it store a field at a time
6322 since that should make a CONST_INT or CONST_DOUBLE when we
6323 fold. Likewise, if we have a target we can use, it is best to
6324 store directly into the target unless the type is large enough
6325 that memcpy will be used. If we are making an initializer and
6326 all operands are constant, put it in memory as well. */
6327 else if ((TREE_STATIC (exp)
6328 && ((mode == BLKmode
6329 && ! (target != 0 && safe_from_p (target, exp, 1)))
6330 || TREE_ADDRESSABLE (exp)
6331 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6332 && (!MOVE_BY_PIECES_P
6333 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6334 TYPE_ALIGN (type) / BITS_PER_UNIT))
6335 && ! mostly_zeros_p (exp))))
6336 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6337 {
6338 rtx constructor = output_constant_def (exp);
6339 if (modifier != EXPAND_CONST_ADDRESS
6340 && modifier != EXPAND_INITIALIZER
6341 && modifier != EXPAND_SUM
6342 && (! memory_address_p (GET_MODE (constructor),
6343 XEXP (constructor, 0))
6344 || (flag_force_addr
6345 && GET_CODE (XEXP (constructor, 0)) != REG)))
6346 constructor = change_address (constructor, VOIDmode,
6347 XEXP (constructor, 0));
6348 return constructor;
6349 }
6350
6351 else
6352 {
6353 /* Handle calls that pass values in multiple non-contiguous
6354 locations. The Irix 6 ABI has examples of this. */
6355 if (target == 0 || ! safe_from_p (target, exp, 1)
6356 || GET_CODE (target) == PARALLEL)
6357 {
6358 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6359 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6360 else
6361 target = assign_temp (type, 0, 1, 1);
6362 }
6363
6364 if (TREE_READONLY (exp))
6365 {
6366 if (GET_CODE (target) == MEM)
6367 target = copy_rtx (target);
6368
6369 RTX_UNCHANGING_P (target) = 1;
6370 }
6371
6372 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6373 int_size_in_bytes (TREE_TYPE (exp)));
6374 return target;
6375 }
6376
6377 case INDIRECT_REF:
6378 {
6379 tree exp1 = TREE_OPERAND (exp, 0);
6380 tree exp2;
6381 tree index;
6382 tree string = string_constant (exp1, &index);
6383 int i;
6384
6385 /* Try to optimize reads from const strings. */
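/* For instance, a read such as *("hello" + 1) satisfies every test below
   and is folded directly to the character constant 'e'.  */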
6386 if (string
6387 && TREE_CODE (string) == STRING_CST
6388 && TREE_CODE (index) == INTEGER_CST
6389 && !TREE_INT_CST_HIGH (index)
6390 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6391 && GET_MODE_CLASS (mode) == MODE_INT
6392 && GET_MODE_SIZE (mode) == 1
6393 && modifier != EXPAND_MEMORY_USE_WO)
6394 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6395
6396 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6397 op0 = memory_address (mode, op0);
6398
6399 if (cfun && current_function_check_memory_usage
6400 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6401 {
6402 enum memory_use_mode memory_usage;
6403 memory_usage = get_memory_usage_from_modifier (modifier);
6404
6405 if (memory_usage != MEMORY_USE_DONT)
6406 {
6407 in_check_memory_usage = 1;
6408 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6409 op0, Pmode,
6410 GEN_INT (int_size_in_bytes (type)),
6411 TYPE_MODE (sizetype),
6412 GEN_INT (memory_usage),
6413 TYPE_MODE (integer_type_node));
6414 in_check_memory_usage = 0;
6415 }
6416 }
6417
6418 temp = gen_rtx_MEM (mode, op0);
6419 /* If address was computed by addition,
6420 mark this as an element of an aggregate. */
6421 if (TREE_CODE (exp1) == PLUS_EXPR
6422 || (TREE_CODE (exp1) == SAVE_EXPR
6423 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6424 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6425 || (TREE_CODE (exp1) == ADDR_EXPR
6426 && (exp2 = TREE_OPERAND (exp1, 0))
6427 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6428 MEM_SET_IN_STRUCT_P (temp, 1);
6429
6430 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6431 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6432
6433 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6434 here, because, in C and C++, the fact that a location is accessed
6435 through a pointer to const does not mean that the value there can
6436 never change. Languages where it can never change should
6437 also set TREE_STATIC. */
6438 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6439
6440 /* If we are writing to this object and its type is a record with
6441 readonly fields, we must mark it as readonly so it will
6442 conflict with readonly references to those fields. */
6443 if (modifier == EXPAND_MEMORY_USE_WO
6444 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6445 RTX_UNCHANGING_P (temp) = 1;
6446
6447 return temp;
6448 }
6449
6450 case ARRAY_REF:
6451 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6452 abort ();
6453
6454 {
6455 tree array = TREE_OPERAND (exp, 0);
6456 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6457 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6458 tree index = TREE_OPERAND (exp, 1);
6459 tree index_type = TREE_TYPE (index);
6460 HOST_WIDE_INT i;
6461
6462 /* Optimize the special-case of a zero lower bound.
6463
6464 We convert the low_bound to sizetype to avoid some problems
6465 with constant folding. (E.g. suppose the lower bound is 1,
6466 and its mode is QI. Without the conversion, (ARRAY
6467 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6468 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6469
6470 But sizetype isn't quite right either (especially if
6471 the lowbound is negative). FIXME */
6472
6473 if (! integer_zerop (low_bound))
6474 index = fold (build (MINUS_EXPR, index_type, index,
6475 convert (sizetype, low_bound)));
6476
6477 /* Fold an expression like: "foo"[2].
6478 This is not done in fold so it won't happen inside &.
6479 Don't fold if this is for wide characters since it's too
6480 difficult to do correctly and this is a very rare case. */
6481
6482 if (TREE_CODE (array) == STRING_CST
6483 && TREE_CODE (index) == INTEGER_CST
6484 && !TREE_INT_CST_HIGH (index)
6485 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6486 && GET_MODE_CLASS (mode) == MODE_INT
6487 && GET_MODE_SIZE (mode) == 1)
6488 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6489
6490 /* If this is a constant index into a constant array,
6491 just get the value from the array. Handle both the cases when
6492 we have an explicit constructor and when our operand is a variable
6493 that was declared const. */
6494
6495 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6496 {
6497 if (TREE_CODE (index) == INTEGER_CST
6498 && TREE_INT_CST_HIGH (index) == 0)
6499 {
6500 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6501
6502 i = TREE_INT_CST_LOW (index);
6503 while (elem && i--)
6504 elem = TREE_CHAIN (elem);
6505 if (elem)
6506 return expand_expr (fold (TREE_VALUE (elem)), target,
6507 tmode, ro_modifier);
6508 }
6509 }
6510
6511 else if (optimize >= 1
6512 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6513 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6514 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6515 {
6516 if (TREE_CODE (index) == INTEGER_CST)
6517 {
6518 tree init = DECL_INITIAL (array);
6519
6520 i = TREE_INT_CST_LOW (index);
6521 if (TREE_CODE (init) == CONSTRUCTOR)
6522 {
6523 tree elem = CONSTRUCTOR_ELTS (init);
6524
6525 while (elem
6526 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6527 elem = TREE_CHAIN (elem);
6528 if (elem)
6529 return expand_expr (fold (TREE_VALUE (elem)), target,
6530 tmode, ro_modifier);
6531 }
6532 else if (TREE_CODE (init) == STRING_CST
6533 && TREE_INT_CST_HIGH (index) == 0
6534 && (TREE_INT_CST_LOW (index)
6535 < TREE_STRING_LENGTH (init)))
6536 return (GEN_INT
6537 (TREE_STRING_POINTER
6538 (init)[TREE_INT_CST_LOW (index)]));
6539 }
6540 }
6541 }
6542
6543 /* ... fall through ... */
6544
6545 case COMPONENT_REF:
6546 case BIT_FIELD_REF:
6547 /* If the operand is a CONSTRUCTOR, we can just extract the
6548 appropriate field if it is present. Don't do this if we have
6549 already written the data since we want to refer to that copy
6550 and varasm.c assumes that's what we'll do. */
6551 if (code != ARRAY_REF
6552 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6553 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6554 {
6555 tree elt;
6556
6557 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6558 elt = TREE_CHAIN (elt))
6559 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6560 /* We can normally use the value of the field in the
6561 CONSTRUCTOR. However, if this is a bitfield in
6562 an integral mode that we can fit in a HOST_WIDE_INT,
6563 we must mask only the number of bits in the bitfield,
6564 since this is done implicitly by the constructor. If
6565 the bitfield does not meet either of those conditions,
6566 we can't do this optimization. */
6567 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6568 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6569 == MODE_INT)
6570 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6571 <= HOST_BITS_PER_WIDE_INT))))
6572 {
6573 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6574 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6575 {
6576 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6577
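/* An unsigned field is simply masked down to its low BITSIZE bits; a signed
   field is shifted left and then arithmetic-shifted back right by the same
   amount so that it is correctly sign-extended.  */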
6578 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6579 {
6580 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6581 op0 = expand_and (op0, op1, target);
6582 }
6583 else
6584 {
6585 enum machine_mode imode
6586 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6587 tree count
6588 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6589 0);
6590
6591 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6592 target, 0);
6593 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6594 target, 0);
6595 }
6596 }
6597
6598 return op0;
6599 }
6600 }
6601
6602 {
6603 enum machine_mode mode1;
6604 int bitsize;
6605 int bitpos;
6606 tree offset;
6607 int volatilep = 0;
6608 int alignment;
6609 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6610 &mode1, &unsignedp, &volatilep,
6611 &alignment);
6612
6613 /* If we got back the original object, something is wrong. Perhaps
6614 we are evaluating an expression too early. In any event, don't
6615 infinitely recurse. */
6616 if (tem == exp)
6617 abort ();
6618
6619 /* If TEM's type is a union of variable size, pass TARGET to the inner
6620 computation, since it will need a temporary and TARGET is known
6621 to be usable as one. This occurs in unchecked conversion in Ada. */
6622
6623 op0 = expand_expr (tem,
6624 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6625 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6626 != INTEGER_CST)
6627 ? target : NULL_RTX),
6628 VOIDmode,
6629 (modifier == EXPAND_INITIALIZER
6630 || modifier == EXPAND_CONST_ADDRESS)
6631 ? modifier : EXPAND_NORMAL);
6632
6633 /* If this is a constant, put it into a register if it is a
6634 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6635 if (CONSTANT_P (op0))
6636 {
6637 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6638 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6639 && offset == 0)
6640 op0 = force_reg (mode, op0);
6641 else
6642 op0 = validize_mem (force_const_mem (mode, op0));
6643 }
6644
6645 if (offset != 0)
6646 {
6647 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6648
6649 /* If this object is in memory, put it into a register.
6650 This case can't occur in C, but can in Ada if we have
6651 unchecked conversion of an expression from a scalar type to
6652 an array or record type. */
6653 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6654 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6655 {
6656 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6657
6658 mark_temp_addr_taken (memloc);
6659 emit_move_insn (memloc, op0);
6660 op0 = memloc;
6661 }
6662
6663 if (GET_CODE (op0) != MEM)
6664 abort ();
6665
6666 if (GET_MODE (offset_rtx) != ptr_mode)
6667 {
6668 #ifdef POINTERS_EXTEND_UNSIGNED
6669 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6670 #else
6671 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6672 #endif
6673 }
6674
6675 /* A constant address in OP0 can have VOIDmode; we must not try
6676 to call force_reg in that case, so avoid it. */
6677 if (GET_CODE (op0) == MEM
6678 && GET_MODE (op0) == BLKmode
6679 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6680 && bitsize != 0
6681 && (bitpos % bitsize) == 0
6682 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6683 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6684 {
6685 rtx temp = change_address (op0, mode1,
6686 plus_constant (XEXP (op0, 0),
6687 (bitpos /
6688 BITS_PER_UNIT)));
6689 if (GET_CODE (XEXP (temp, 0)) == REG)
6690 op0 = temp;
6691 else
6692 op0 = change_address (op0, mode1,
6693 force_reg (GET_MODE (XEXP (temp, 0)),
6694 XEXP (temp, 0)));
6695 bitpos = 0;
6696 }
6697
6698
6699 op0 = change_address (op0, VOIDmode,
6700 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6701 force_reg (ptr_mode,
6702 offset_rtx)));
6703 }
6704
6705 /* Don't forget about volatility even if this is a bitfield. */
6706 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6707 {
6708 op0 = copy_rtx (op0);
6709 MEM_VOLATILE_P (op0) = 1;
6710 }
6711
6712 /* Check the access. */
6713 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6714 {
6715 enum memory_use_mode memory_usage;
6716 memory_usage = get_memory_usage_from_modifier (modifier);
6717
6718 if (memory_usage != MEMORY_USE_DONT)
6719 {
6720 rtx to;
6721 int size;
6722
6723 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6724 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6725
6726 /* Check the access right of the pointer. */
6727 if (size > BITS_PER_UNIT)
6728 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6729 to, Pmode,
6730 GEN_INT (size / BITS_PER_UNIT),
6731 TYPE_MODE (sizetype),
6732 GEN_INT (memory_usage),
6733 TYPE_MODE (integer_type_node));
6734 }
6735 }
6736
6737 /* In cases where an aligned union has an unaligned object
6738 as a field, we might be extracting a BLKmode value from
6739 an integer-mode (e.g., SImode) object. Handle this case
6740 by doing the extract into an object as wide as the field
6741 (which we know to be the width of a basic mode), then
6742 storing into memory, and changing the mode to BLKmode.
6743 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6744 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6745 if (mode1 == VOIDmode
6746 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6747 || (modifier != EXPAND_CONST_ADDRESS
6748 && modifier != EXPAND_INITIALIZER
6749 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6750 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6751 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6752 /* If the field isn't aligned enough to fetch as a memref,
6753 fetch it as a bit field. */
6754 || (mode1 != BLKmode && SLOW_UNALIGNED_ACCESS
6755 && ((TYPE_ALIGN (TREE_TYPE (tem))
6756 < (unsigned int) GET_MODE_ALIGNMENT (mode))
6757 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))))
6758 || (modifier != EXPAND_CONST_ADDRESS
6759 && modifier != EXPAND_INITIALIZER
6760 && mode == BLKmode
6761 && SLOW_UNALIGNED_ACCESS
6762 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6763 || bitpos % TYPE_ALIGN (type) != 0)))
6764 {
6765 enum machine_mode ext_mode = mode;
6766
6767 if (ext_mode == BLKmode
6768 && ! (target != 0 && GET_CODE (op0) == MEM
6769 && GET_CODE (target) == MEM
6770 && bitpos % BITS_PER_UNIT == 0))
6771 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6772
6773 if (ext_mode == BLKmode)
6774 {
6775 /* In this case, BITPOS must start at a byte boundary and
6776 TARGET, if specified, must be a MEM. */
6777 if (GET_CODE (op0) != MEM
6778 || (target != 0 && GET_CODE (target) != MEM)
6779 || bitpos % BITS_PER_UNIT != 0)
6780 abort ();
6781
6782 op0 = change_address (op0, VOIDmode,
6783 plus_constant (XEXP (op0, 0),
6784 bitpos / BITS_PER_UNIT));
6785 if (target == 0)
6786 target = assign_temp (type, 0, 1, 1);
6787
6788 emit_block_move (target, op0,
6789 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6790 / BITS_PER_UNIT),
6791 1);
6792
6793 return target;
6794 }
6795
6796 op0 = validize_mem (op0);
6797
6798 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6799 mark_reg_pointer (XEXP (op0, 0), alignment);
6800
6801 op0 = extract_bit_field (op0, bitsize, bitpos,
6802 unsignedp, target, ext_mode, ext_mode,
6803 alignment,
6804 int_size_in_bytes (TREE_TYPE (tem)));
6805
6806 /* If the result is a record type and BITSIZE is narrower than
6807 the mode of OP0, an integral mode, and this is a big endian
6808 machine, we must put the field into the high-order bits. */
6809 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6810 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6811 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6812 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6813 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6814 - bitsize),
6815 op0, 1);
6816
6817 if (mode == BLKmode)
6818 {
6819 rtx new = assign_stack_temp (ext_mode,
6820 bitsize / BITS_PER_UNIT, 0);
6821
6822 emit_move_insn (new, op0);
6823 op0 = copy_rtx (new);
6824 PUT_MODE (op0, BLKmode);
6825 MEM_SET_IN_STRUCT_P (op0, 1);
6826 }
6827
6828 return op0;
6829 }
6830
6831 /* If the result is BLKmode, use that to access the object
6832 now as well. */
6833 if (mode == BLKmode)
6834 mode1 = BLKmode;
6835
6836 /* Get a reference to just this component. */
6837 if (modifier == EXPAND_CONST_ADDRESS
6838 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6839 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6840 (bitpos / BITS_PER_UNIT)));
6841 else
6842 op0 = change_address (op0, mode1,
6843 plus_constant (XEXP (op0, 0),
6844 (bitpos / BITS_PER_UNIT)));
6845
6846 if (GET_CODE (op0) == MEM)
6847 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6848
6849 if (GET_CODE (XEXP (op0, 0)) == REG)
6850 mark_reg_pointer (XEXP (op0, 0), alignment);
6851
6852 MEM_SET_IN_STRUCT_P (op0, 1);
6853 MEM_VOLATILE_P (op0) |= volatilep;
6854 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6855 || modifier == EXPAND_CONST_ADDRESS
6856 || modifier == EXPAND_INITIALIZER)
6857 return op0;
6858 else if (target == 0)
6859 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6860
6861 convert_move (target, op0, unsignedp);
6862 return target;
6863 }
6864
6865 /* Intended for a reference to a buffer of a file-object in Pascal.
6866 But it's not certain that a special tree code will really be
6867 necessary for these. INDIRECT_REF might work for them. */
6868 case BUFFER_REF:
6869 abort ();
6870
6871 case IN_EXPR:
6872 {
6873 /* Pascal set IN expression.
6874
6875 Algorithm:
6876 rlo = set_low - (set_low%bits_per_word);
6877 the_word = set [ (index - rlo)/bits_per_word ];
6878 bit_index = index % bits_per_word;
6879 bitmask = 1 << bit_index;
6880 return !!(the_word & bitmask); */
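/* A worked instance of the pseudo-code above: with bits_per_word == 8,
   set_low == 3 and index == 10 we get rlo == 0, the_word == set[1],
   bit_index == 2 and bitmask == 4.  */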
6881
6882 tree set = TREE_OPERAND (exp, 0);
6883 tree index = TREE_OPERAND (exp, 1);
6884 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6885 tree set_type = TREE_TYPE (set);
6886 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6887 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6888 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6889 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6890 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6891 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6892 rtx setaddr = XEXP (setval, 0);
6893 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6894 rtx rlow;
6895 rtx diff, quo, rem, addr, bit, result;
6896
6897 preexpand_calls (exp);
6898
6899 /* If domain is empty, answer is no. Likewise if index is constant
6900 and out of bounds. */
6901 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6902 && TREE_CODE (set_low_bound) == INTEGER_CST
6903 && tree_int_cst_lt (set_high_bound, set_low_bound))
6904 || (TREE_CODE (index) == INTEGER_CST
6905 && TREE_CODE (set_low_bound) == INTEGER_CST
6906 && tree_int_cst_lt (index, set_low_bound))
6907 || (TREE_CODE (set_high_bound) == INTEGER_CST
6908 && TREE_CODE (index) == INTEGER_CST
6909 && tree_int_cst_lt (set_high_bound, index))))
6910 return const0_rtx;
6911
6912 if (target == 0)
6913 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6914
6915 /* If we get here, we have to generate the code for both cases
6916 (in range and out of range). */
6917
6918 op0 = gen_label_rtx ();
6919 op1 = gen_label_rtx ();
6920
6921 if (! (GET_CODE (index_val) == CONST_INT
6922 && GET_CODE (lo_r) == CONST_INT))
6923 {
6924 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6925 GET_MODE (index_val), iunsignedp, 0, op1);
6926 }
6927
6928 if (! (GET_CODE (index_val) == CONST_INT
6929 && GET_CODE (hi_r) == CONST_INT))
6930 {
6931 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6932 GET_MODE (index_val), iunsignedp, 0, op1);
6933 }
6934
6935 /* Calculate the element number of bit zero in the first word
6936 of the set. */
6937 if (GET_CODE (lo_r) == CONST_INT)
6938 rlow = GEN_INT (INTVAL (lo_r)
6939 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6940 else
6941 rlow = expand_binop (index_mode, and_optab, lo_r,
6942 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6943 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6944
6945 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6946 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6947
6948 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6949 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6950 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6951 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6952
6953 addr = memory_address (byte_mode,
6954 expand_binop (index_mode, add_optab, diff,
6955 setaddr, NULL_RTX, iunsignedp,
6956 OPTAB_LIB_WIDEN));
6957
6958 /* Extract the bit we want to examine.  */
6959 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6960 gen_rtx_MEM (byte_mode, addr),
6961 make_tree (TREE_TYPE (index), rem),
6962 NULL_RTX, 1);
6963 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6964 GET_MODE (target) == byte_mode ? target : 0,
6965 1, OPTAB_LIB_WIDEN);
6966
6967 if (result != target)
6968 convert_move (target, result, 1);
6969
6970 /* Output the code to handle the out-of-range case. */
6971 emit_jump (op0);
6972 emit_label (op1);
6973 emit_move_insn (target, const0_rtx);
6974 emit_label (op0);
6975 return target;
6976 }
6977
6978 case WITH_CLEANUP_EXPR:
6979 if (RTL_EXPR_RTL (exp) == 0)
6980 {
6981 RTL_EXPR_RTL (exp)
6982 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6983 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6984
6985 /* That's it for this cleanup. */
6986 TREE_OPERAND (exp, 2) = 0;
6987 }
6988 return RTL_EXPR_RTL (exp);
6989
6990 case CLEANUP_POINT_EXPR:
6991 {
6992 /* Start a new binding layer that will keep track of all cleanup
6993 actions to be performed. */
6994 expand_start_bindings (2);
6995
6996 target_temp_slot_level = temp_slot_level;
6997
6998 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6999 /* If we're going to use this value, load it up now. */
7000 if (! ignore)
7001 op0 = force_not_mem (op0);
7002 preserve_temp_slots (op0);
7003 expand_end_bindings (NULL_TREE, 0, 0);
7004 }
7005 return op0;
7006
7007 case CALL_EXPR:
7008 /* Check for a built-in function. */
7009 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7010 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7011 == FUNCTION_DECL)
7012 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7013 return expand_builtin (exp, target, subtarget, tmode, ignore);
7014
7015 /* If this call was expanded already by preexpand_calls,
7016 just return the result we got. */
7017 if (CALL_EXPR_RTL (exp) != 0)
7018 return CALL_EXPR_RTL (exp);
7019
7020 return expand_call (exp, target, ignore);
7021
7022 case NON_LVALUE_EXPR:
7023 case NOP_EXPR:
7024 case CONVERT_EXPR:
7025 case REFERENCE_EXPR:
7026 if (TREE_CODE (type) == UNION_TYPE)
7027 {
7028 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7029
7030 /* If both input and output are BLKmode, this conversion
7031 isn't actually doing anything unless we need to make the
7032 alignment stricter. */
7033 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7034 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7035 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7036 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7037 modifier);
7038
7039 if (target == 0)
7040 {
7041 if (mode != BLKmode)
7042 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7043 else
7044 target = assign_temp (type, 0, 1, 1);
7045 }
7046
7047 if (GET_CODE (target) == MEM)
7048 /* Store data into beginning of memory target. */
7049 store_expr (TREE_OPERAND (exp, 0),
7050 change_address (target, TYPE_MODE (valtype), 0), 0);
7051
7052 else if (GET_CODE (target) == REG)
7053 /* Store this field into a union of the proper type. */
7054 store_field (target,
7055 MIN ((int_size_in_bytes (TREE_TYPE
7056 (TREE_OPERAND (exp, 0)))
7057 * BITS_PER_UNIT),
7058 GET_MODE_BITSIZE (mode)),
7059 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7060 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
7061 else
7062 abort ();
7063
7064 /* Return the entire union. */
7065 return target;
7066 }
7067
7068 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7069 {
7070 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7071 ro_modifier);
7072
7073 /* If the signedness of the conversion differs and OP0 is
7074 a promoted SUBREG, clear that indication since we now
7075 have to do the proper extension. */
7076 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7077 && GET_CODE (op0) == SUBREG)
7078 SUBREG_PROMOTED_VAR_P (op0) = 0;
7079
7080 return op0;
7081 }
7082
7083 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7084 if (GET_MODE (op0) == mode)
7085 return op0;
7086
7087 /* If OP0 is a constant, just convert it into the proper mode. */
7088 if (CONSTANT_P (op0))
7089 return
7090 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7091 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7092
7093 if (modifier == EXPAND_INITIALIZER)
7094 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7095
7096 if (target == 0)
7097 return
7098 convert_to_mode (mode, op0,
7099 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7100 else
7101 convert_move (target, op0,
7102 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7103 return target;
7104
7105 case PLUS_EXPR:
7106 /* We come here from MINUS_EXPR when the second operand is a
7107 constant. */
7108 plus_expr:
7109 this_optab = add_optab;
7110
7111 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7112 something else, make sure we add the register to the constant and
7113 then to the other thing. This case can occur during strength
7114 reduction and doing it this way will produce better code if the
7115 frame pointer or argument pointer is eliminated.
7116
7117 fold-const.c will ensure that the constant is always in the inner
7118 PLUS_EXPR, so the only case we need to do anything about is if
7119 sp, ap, or fp is our second argument, in which case we must swap
7120 the innermost first argument and our second argument. */
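/* For example, the sum (x + 4) + fp is rearranged here into
   (fp + 4) + x, so that the frame pointer and the constant can
   later be combined into a single address.  */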
7121
7122 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7123 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7124 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7125 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7126 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7127 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7128 {
7129 tree t = TREE_OPERAND (exp, 1);
7130
7131 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7132 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7133 }
7134
7135 /* If the result is to be ptr_mode and we are adding an integer to
7136 something, we might be forming a constant. So try to use
7137 plus_constant. If it produces a sum and we can't accept it,
7138 use force_operand. This allows P = &ARR[const] to generate
7139 efficient code on machines where a SYMBOL_REF is not a valid
7140 address.
7141
7142 If this is an EXPAND_SUM call, always return the sum. */
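/* For instance (assuming 4-byte array elements), p = &arr[3] can be
   folded by plus_constant into the single constant address
   (const (plus (symbol_ref arr) (const_int 12))).  */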
7143 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7144 || mode == ptr_mode)
7145 {
7146 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7147 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7148 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7149 {
7150 rtx constant_part;
7151
7152 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7153 EXPAND_SUM);
7154 /* Use immed_double_const to ensure that the constant is
7155 truncated according to the mode of OP1, then sign extended
7156 to a HOST_WIDE_INT. Using the constant directly can result
7157 in non-canonical RTL in a 64x32 cross compile. */
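/* E.g. with a 64-bit HOST_WIDE_INT and a 32-bit target, the SImode
   constant 0xffffffff must become the canonical CONST_INT -1 rather
   than 4294967295.  */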
7158 constant_part
7159 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7160 (HOST_WIDE_INT) 0,
7161 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7162 op1 = plus_constant (op1, INTVAL (constant_part));
7163 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7164 op1 = force_operand (op1, target);
7165 return op1;
7166 }
7167
7168 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7169 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7170 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7171 {
7172 rtx constant_part;
7173
7174 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7175 EXPAND_SUM);
7176 if (! CONSTANT_P (op0))
7177 {
7178 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7179 VOIDmode, modifier);
7180 /* Don't go to both_summands if modifier
7181 says it's not right to return a PLUS. */
7182 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7183 goto binop2;
7184 goto both_summands;
7185 }
7186 /* Use immed_double_const to ensure that the constant is
7187 truncated according to the mode of OP0, then sign extended
7188 to a HOST_WIDE_INT. Using the constant directly can result
7189 in non-canonical RTL in a 64x32 cross compile. */
7190 constant_part
7191 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7192 (HOST_WIDE_INT) 0,
7193 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7194 op0 = plus_constant (op0, INTVAL (constant_part));
7195 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7196 op0 = force_operand (op0, target);
7197 return op0;
7198 }
7199 }
7200
7201 /* No sense saving up arithmetic to be done
7202 if it's all in the wrong mode to form part of an address.
7203 And force_operand won't know whether to sign-extend or
7204 zero-extend. */
7205 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7206 || mode != ptr_mode)
7207 goto binop;
7208
7209 preexpand_calls (exp);
7210 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7211 subtarget = 0;
7212
7213 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7214 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7215
7216 both_summands:
7217 /* Make sure any term that's a sum with a constant comes last. */
7218 if (GET_CODE (op0) == PLUS
7219 && CONSTANT_P (XEXP (op0, 1)))
7220 {
7221 temp = op0;
7222 op0 = op1;
7223 op1 = temp;
7224 }
7225 /* If adding to a sum including a constant,
7226 associate it to put the constant outside. */
7227 if (GET_CODE (op1) == PLUS
7228 && CONSTANT_P (XEXP (op1, 1)))
7229 {
7230 rtx constant_term = const0_rtx;
7231
7232 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7233 if (temp != 0)
7234 op0 = temp;
7235 /* Ensure that MULT comes first if there is one. */
7236 else if (GET_CODE (op0) == MULT)
7237 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7238 else
7239 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7240
7241 /* Let's also eliminate constants from op0 if possible. */
7242 op0 = eliminate_constant_term (op0, &constant_term);
7243
7244 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7245 their sum should be a constant. Form it into OP1, since the
7246 result we want will then be OP0 + OP1. */
7247
7248 temp = simplify_binary_operation (PLUS, mode, constant_term,
7249 XEXP (op1, 1));
7250 if (temp != 0)
7251 op1 = temp;
7252 else
7253 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7254 }
7255
7256 /* Put a constant term last and put a multiplication first. */
7257 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7258 temp = op1, op1 = op0, op0 = temp;
7259
7260 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7261 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7262
7263 case MINUS_EXPR:
7264 /* For initializers, we are allowed to return a MINUS of two
7265 symbolic constants. Here we handle all cases when both operands
7266 are constant. */
7269 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7270 && really_constant_p (TREE_OPERAND (exp, 0))
7271 && really_constant_p (TREE_OPERAND (exp, 1)))
7272 {
7273 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7274 VOIDmode, ro_modifier);
7275 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7276 VOIDmode, ro_modifier);
7277
7278 /* If the last operand is a CONST_INT, use plus_constant of
7279 the negated constant. Else make the MINUS. */
7280 if (GET_CODE (op1) == CONST_INT)
7281 return plus_constant (op0, - INTVAL (op1));
7282 else
7283 return gen_rtx_MINUS (mode, op0, op1);
7284 }
7285 /* Convert A - const to A + (-const). */
7286 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7287 {
7288 tree negated = fold (build1 (NEGATE_EXPR, type,
7289 TREE_OPERAND (exp, 1)));
7290
7291 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7292 /* If we can't negate the constant in TYPE, leave it alone and
7293 expand_binop will negate it for us. We used to try to do it
7294 here in the signed version of TYPE, but that doesn't work
7295 on POINTER_TYPEs. */;
7296 else
7297 {
7298 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7299 goto plus_expr;
7300 }
7301 }
7302 this_optab = sub_optab;
7303 goto binop;
7304
7305 case MULT_EXPR:
7306 preexpand_calls (exp);
7307 /* If first operand is constant, swap them.
7308 Thus the following special case checks need only
7309 check the second operand. */
7310 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7311 {
7312 register tree t1 = TREE_OPERAND (exp, 0);
7313 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7314 TREE_OPERAND (exp, 1) = t1;
7315 }
7316
7317 /* Attempt to return something suitable for generating an
7318 indexed address, for machines that support that. */
7319
7320 if (modifier == EXPAND_SUM && mode == ptr_mode
7321 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7322 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7323 {
7324 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7325 EXPAND_SUM);
7326
7327 /* Apply distributive law if OP0 is x+c. */
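/* For example, (x + 8) * 4 becomes x*4 + 32, which is a better form
   for address arithmetic.  */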
7328 if (GET_CODE (op0) == PLUS
7329 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7330 return
7331 gen_rtx_PLUS
7332 (mode,
7333 gen_rtx_MULT
7334 (mode, XEXP (op0, 0),
7335 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7336 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7337 * INTVAL (XEXP (op0, 1))));
7338
7339 if (GET_CODE (op0) != REG)
7340 op0 = force_operand (op0, NULL_RTX);
7341 if (GET_CODE (op0) != REG)
7342 op0 = copy_to_mode_reg (mode, op0);
7343
7344 return
7345 gen_rtx_MULT (mode, op0,
7346 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7347 }
7348
7349 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7350 subtarget = 0;
7351
7352 /* Check for multiplying things that have been extended
7353 from a narrower type. If this machine supports multiplying
7354 in that narrower type with a result in the desired type,
7355 do it that way, and avoid the explicit type-conversion. */
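/* For example, on a target where 'short' is HImode and 'int' is
   SImode, (int) s1 * (int) s2 for two 'short' operands can use a
   single HImode x HImode -> SImode widening multiply.  */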
7356 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7357 && TREE_CODE (type) == INTEGER_TYPE
7358 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7359 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7360 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7361 && int_fits_type_p (TREE_OPERAND (exp, 1),
7362 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7363 /* Don't use a widening multiply if a shift will do. */
7364 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7365 > HOST_BITS_PER_WIDE_INT)
7366 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7367 ||
7368 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7369 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7370 ==
7371 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7372 /* If both operands are extended, they must either both
7373 be zero-extended or both be sign-extended. */
7374 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7375 ==
7376 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7377 {
7378 enum machine_mode innermode
7379 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7380 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7381 ? smul_widen_optab : umul_widen_optab);
7382 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7383 ? umul_widen_optab : smul_widen_optab);
7384 if (mode == GET_MODE_WIDER_MODE (innermode))
7385 {
7386 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7387 {
7388 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7389 NULL_RTX, VOIDmode, 0);
7390 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7391 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7392 VOIDmode, 0);
7393 else
7394 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7395 NULL_RTX, VOIDmode, 0);
7396 goto binop2;
7397 }
7398 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7399 && innermode == word_mode)
7400 {
7401 rtx htem;
7402 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7403 NULL_RTX, VOIDmode, 0);
7404 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7405 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7406 VOIDmode, 0);
7407 else
7408 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7409 NULL_RTX, VOIDmode, 0);
7410 temp = expand_binop (mode, other_optab, op0, op1, target,
7411 unsignedp, OPTAB_LIB_WIDEN);
7412 htem = expand_mult_highpart_adjust (innermode,
7413 gen_highpart (innermode, temp),
7414 op0, op1,
7415 gen_highpart (innermode, temp),
7416 unsignedp);
7417 emit_move_insn (gen_highpart (innermode, temp), htem);
7418 return temp;
7419 }
7420 }
7421 }
7422 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7423 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7424 return expand_mult (mode, op0, op1, target, unsignedp);
7425
7426 case TRUNC_DIV_EXPR:
7427 case FLOOR_DIV_EXPR:
7428 case CEIL_DIV_EXPR:
7429 case ROUND_DIV_EXPR:
7430 case EXACT_DIV_EXPR:
7431 preexpand_calls (exp);
7432 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7433 subtarget = 0;
7434 /* Possible optimization: compute the dividend with EXPAND_SUM;
7435 then, if the divisor is constant, we can optimize the case
7436 where some terms of the dividend have coefficients divisible by it. */
7437 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7438 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7439 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7440
7441 case RDIV_EXPR:
7442 this_optab = flodiv_optab;
7443 goto binop;
7444
7445 case TRUNC_MOD_EXPR:
7446 case FLOOR_MOD_EXPR:
7447 case CEIL_MOD_EXPR:
7448 case ROUND_MOD_EXPR:
7449 preexpand_calls (exp);
7450 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7451 subtarget = 0;
7452 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7453 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7454 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7455
7456 case FIX_ROUND_EXPR:
7457 case FIX_FLOOR_EXPR:
7458 case FIX_CEIL_EXPR:
7459 abort (); /* Not used for C. */
7460
7461 case FIX_TRUNC_EXPR:
7462 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7463 if (target == 0)
7464 target = gen_reg_rtx (mode);
7465 expand_fix (target, op0, unsignedp);
7466 return target;
7467
7468 case FLOAT_EXPR:
7469 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7470 if (target == 0)
7471 target = gen_reg_rtx (mode);
7472 /* expand_float can't figure out what to do if FROM has VOIDmode.
7473 So give it the correct mode. With -O, cse will optimize this. */
7474 if (GET_MODE (op0) == VOIDmode)
7475 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7476 op0);
7477 expand_float (target, op0,
7478 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7479 return target;
7480
7481 case NEGATE_EXPR:
7482 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7483 temp = expand_unop (mode, neg_optab, op0, target, 0);
7484 if (temp == 0)
7485 abort ();
7486 return temp;
7487
7488 case ABS_EXPR:
7489 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7490
7491 /* Handle complex values specially. */
7492 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7493 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7494 return expand_complex_abs (mode, op0, target, unsignedp);
7495
7496 /* Unsigned abs is simply the operand. Testing here means we don't
7497 risk generating incorrect code below. */
7498 if (TREE_UNSIGNED (type))
7499 return op0;
7500
7501 return expand_abs (mode, op0, target,
7502 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7503
7504 case MAX_EXPR:
7505 case MIN_EXPR:
7506 target = original_target;
7507 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7508 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7509 || GET_MODE (target) != mode
7510 || (GET_CODE (target) == REG
7511 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7512 target = gen_reg_rtx (mode);
7513 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7514 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7515
7516 /* First try to do it with a special MIN or MAX instruction.
7517 If that does not win, use a conditional jump to select the proper
7518 value. */
7519 this_optab = (TREE_UNSIGNED (type)
7520 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7521 : (code == MIN_EXPR ? smin_optab : smax_optab));
7522
7523 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7524 OPTAB_WIDEN);
7525 if (temp != 0)
7526 return temp;
7527
7528 /* At this point, a MEM target is no longer useful; we will get better
7529 code without it. */
7530
7531 if (GET_CODE (target) == MEM)
7532 target = gen_reg_rtx (mode);
7533
7534 if (target != op0)
7535 emit_move_insn (target, op0);
7536
7537 op0 = gen_label_rtx ();
7538
7539 /* If this mode is an integer too wide to compare properly,
7540 compare word by word. Rely on cse to optimize constant cases. */
7541 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode, ccp_jump))
7542 {
7543 if (code == MAX_EXPR)
7544 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7545 target, op1, NULL_RTX, op0);
7546 else
7547 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7548 op1, target, NULL_RTX, op0);
7549 }
7550 else
7551 {
7552 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7553 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7554 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7555 op0);
7556 }
7557 emit_move_insn (target, op1);
7558 emit_label (op0);
7559 return target;
7560
7561 case BIT_NOT_EXPR:
7562 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7563 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7564 if (temp == 0)
7565 abort ();
7566 return temp;
7567
7568 case FFS_EXPR:
7569 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7570 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7571 if (temp == 0)
7572 abort ();
7573 return temp;
7574
7575 /* ??? Can optimize bitwise operations with one arg constant.
7576 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7577 and (a bitwise1 b) bitwise2 b (etc)
7578 but that is probably not worthwhile. */
7579
7580 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7581 boolean values when we want in all cases to compute both of them. In
7582 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7583 as actual zero-or-1 values and then bitwise anding. In cases where
7584 there cannot be any side effects, better code would be made by
7585 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7586 how to recognize those cases. */
7587
7588 case TRUTH_AND_EXPR:
7589 case BIT_AND_EXPR:
7590 this_optab = and_optab;
7591 goto binop;
7592
7593 case TRUTH_OR_EXPR:
7594 case BIT_IOR_EXPR:
7595 this_optab = ior_optab;
7596 goto binop;
7597
7598 case TRUTH_XOR_EXPR:
7599 case BIT_XOR_EXPR:
7600 this_optab = xor_optab;
7601 goto binop;
7602
7603 case LSHIFT_EXPR:
7604 case RSHIFT_EXPR:
7605 case LROTATE_EXPR:
7606 case RROTATE_EXPR:
7607 preexpand_calls (exp);
7608 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7609 subtarget = 0;
7610 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7611 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7612 unsignedp);
7613
7614 /* Could determine the answer when only additive constants differ. Also,
7615 the addition of one can be handled by changing the condition. */
7616 case LT_EXPR:
7617 case LE_EXPR:
7618 case GT_EXPR:
7619 case GE_EXPR:
7620 case EQ_EXPR:
7621 case NE_EXPR:
7622 preexpand_calls (exp);
7623 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7624 if (temp != 0)
7625 return temp;
7626
7627 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7628 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7629 && original_target
7630 && GET_CODE (original_target) == REG
7631 && (GET_MODE (original_target)
7632 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7633 {
7634 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7635 VOIDmode, 0);
7636
7637 if (temp != original_target)
7638 temp = copy_to_reg (temp);
7639
7640 op1 = gen_label_rtx ();
7641 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7642 GET_MODE (temp), unsignedp, 0, op1);
7643 emit_move_insn (temp, const1_rtx);
7644 emit_label (op1);
7645 return temp;
7646 }
7647
7648 /* If no set-flag instruction, must generate a conditional
7649 store into a temporary variable. Drop through
7650 and handle this like && and ||. */
7651
7652 case TRUTH_ANDIF_EXPR:
7653 case TRUTH_ORIF_EXPR:
7654 if (! ignore
7655 && (target == 0 || ! safe_from_p (target, exp, 1)
7656 /* Make sure we don't have a hard reg (such as function's return
7657 value) live across basic blocks, if not optimizing. */
7658 || (!optimize && GET_CODE (target) == REG
7659 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7660 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7661
7662 if (target)
7663 emit_clr_insn (target);
7664
7665 op1 = gen_label_rtx ();
7666 jumpifnot (exp, op1);
7667
7668 if (target)
7669 emit_0_to_1_insn (target);
7670
7671 emit_label (op1);
7672 return ignore ? const0_rtx : target;
7673
7674 case TRUTH_NOT_EXPR:
7675 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7676 /* The parser is careful to generate TRUTH_NOT_EXPR
7677 only with operands that are always zero or one. */
7678 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7679 target, 1, OPTAB_LIB_WIDEN);
7680 if (temp == 0)
7681 abort ();
7682 return temp;
7683
7684 case COMPOUND_EXPR:
7685 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7686 emit_queue ();
7687 return expand_expr (TREE_OPERAND (exp, 1),
7688 (ignore ? const0_rtx : target),
7689 VOIDmode, 0);
7690
7691 case COND_EXPR:
7692 /* If we would have a "singleton" (see below) were it not for a
7693 conversion in each arm, bring that conversion back out. */
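/* For example, (cond ? (long) x : (long) (x + 1)) is handled here as
   (long) (cond ? x : x + 1), so the singleton code below can see the
   common operand.  */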
7694 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7695 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7696 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7697 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7698 {
7699 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7700 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7701
7702 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7703 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7704 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7705 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7706 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7707 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7708 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7709 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7710 return expand_expr (build1 (NOP_EXPR, type,
7711 build (COND_EXPR, TREE_TYPE (true),
7712 TREE_OPERAND (exp, 0),
7713 true, false)),
7714 target, tmode, modifier);
7715 }
7716
7717 {
7718 /* Note that COND_EXPRs whose type is a structure or union
7719 are required to be constructed to contain assignments of
7720 a temporary variable, so that we can evaluate them here
7721 for side effect only. If type is void, we must do likewise. */
7722
7723 /* If an arm of the branch requires a cleanup,
7724 only that cleanup is performed. */
7725
7726 tree singleton = 0;
7727 tree binary_op = 0, unary_op = 0;
7728
7729 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7730 convert it to our mode, if necessary. */
7731 if (integer_onep (TREE_OPERAND (exp, 1))
7732 && integer_zerop (TREE_OPERAND (exp, 2))
7733 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7734 {
7735 if (ignore)
7736 {
7737 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7738 ro_modifier);
7739 return const0_rtx;
7740 }
7741
7742 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7743 if (GET_MODE (op0) == mode)
7744 return op0;
7745
7746 if (target == 0)
7747 target = gen_reg_rtx (mode);
7748 convert_move (target, op0, unsignedp);
7749 return target;
7750 }
7751
7752 /* Check for X ? A + B : A. If we have this, we can copy A to the
7753 output and conditionally add B. Similarly for unary operations.
7754 Don't do this if X has side-effects because those side effects
7755 might affect A or B and the "?" operation is a sequence point in
7756 ANSI. (operand_equal_p tests for side effects.) */
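/* For example, for (cond ? a + b : a) we can store A into the target
   unconditionally and then add B only when COND is true.  */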
7757
7758 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7759 && operand_equal_p (TREE_OPERAND (exp, 2),
7760 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7761 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7762 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7763 && operand_equal_p (TREE_OPERAND (exp, 1),
7764 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7765 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7766 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7767 && operand_equal_p (TREE_OPERAND (exp, 2),
7768 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7769 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7770 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7771 && operand_equal_p (TREE_OPERAND (exp, 1),
7772 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7773 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7774
7775 /* If we are not to produce a result, we have no target. Otherwise,
7776 if a target was specified use it; it will not be used as an
7777 intermediate target unless it is safe. If no target, use a
7778 temporary. */
7779
7780 if (ignore)
7781 temp = 0;
7782 else if (original_target
7783 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7784 || (singleton && GET_CODE (original_target) == REG
7785 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7786 && original_target == var_rtx (singleton)))
7787 && GET_MODE (original_target) == mode
7788 #ifdef HAVE_conditional_move
7789 && (! can_conditionally_move_p (mode)
7790 || GET_CODE (original_target) == REG
7791 || TREE_ADDRESSABLE (type))
7792 #endif
7793 && ! (GET_CODE (original_target) == MEM
7794 && MEM_VOLATILE_P (original_target)))
7795 temp = original_target;
7796 else if (TREE_ADDRESSABLE (type))
7797 abort ();
7798 else
7799 temp = assign_temp (type, 0, 0, 1);
7800
7801 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7802 do the test of X as a store-flag operation, do this as
7803 A + ((X != 0) << log C). Similarly for other simple binary
7804 operators. Only do for C == 1 if BRANCH_COST is low. */
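/* For example, (x > y ? n + 4 : n) can be computed without a branch
   as n + ((x > y) << 2).  */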
7805 if (temp && singleton && binary_op
7806 && (TREE_CODE (binary_op) == PLUS_EXPR
7807 || TREE_CODE (binary_op) == MINUS_EXPR
7808 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7809 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7810 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7811 : integer_onep (TREE_OPERAND (binary_op, 1)))
7812 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7813 {
7814 rtx result;
7815 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7816 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7817 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7818 : xor_optab);
7819
7820 /* If we had X ? A : A + 1, do this as A + (X == 0).
7821
7822 We have to invert the truth value here and then put it
7823 back later if do_store_flag fails. We cannot simply copy
7824 TREE_OPERAND (exp, 0) to another variable and modify that
7825 because invert_truthvalue can modify the tree pointed to
7826 by its argument. */
7827 if (singleton == TREE_OPERAND (exp, 1))
7828 TREE_OPERAND (exp, 0)
7829 = invert_truthvalue (TREE_OPERAND (exp, 0));
7830
7831 result = do_store_flag (TREE_OPERAND (exp, 0),
7832 (safe_from_p (temp, singleton, 1)
7833 ? temp : NULL_RTX),
7834 mode, BRANCH_COST <= 1);
7835
7836 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7837 result = expand_shift (LSHIFT_EXPR, mode, result,
7838 build_int_2 (tree_log2
7839 (TREE_OPERAND
7840 (binary_op, 1)),
7841 0),
7842 (safe_from_p (temp, singleton, 1)
7843 ? temp : NULL_RTX), 0);
7844
7845 if (result)
7846 {
7847 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7848 return expand_binop (mode, boptab, op1, result, temp,
7849 unsignedp, OPTAB_LIB_WIDEN);
7850 }
7851 else if (singleton == TREE_OPERAND (exp, 1))
7852 TREE_OPERAND (exp, 0)
7853 = invert_truthvalue (TREE_OPERAND (exp, 0));
7854 }
7855
7856 do_pending_stack_adjust ();
7857 NO_DEFER_POP;
7858 op0 = gen_label_rtx ();
7859
7860 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7861 {
7862 if (temp != 0)
7863 {
7864 /* If the target conflicts with the other operand of the
7865 binary op, we can't use it. Also, we can't use the target
7866 if it is a hard register, because evaluating the condition
7867 might clobber it. */
7868 if ((binary_op
7869 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7870 || (GET_CODE (temp) == REG
7871 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7872 temp = gen_reg_rtx (mode);
7873 store_expr (singleton, temp, 0);
7874 }
7875 else
7876 expand_expr (singleton,
7877 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7878 if (singleton == TREE_OPERAND (exp, 1))
7879 jumpif (TREE_OPERAND (exp, 0), op0);
7880 else
7881 jumpifnot (TREE_OPERAND (exp, 0), op0);
7882
7883 start_cleanup_deferral ();
7884 if (binary_op && temp == 0)
7885 /* Just touch the other operand. */
7886 expand_expr (TREE_OPERAND (binary_op, 1),
7887 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7888 else if (binary_op)
7889 store_expr (build (TREE_CODE (binary_op), type,
7890 make_tree (type, temp),
7891 TREE_OPERAND (binary_op, 1)),
7892 temp, 0);
7893 else
7894 store_expr (build1 (TREE_CODE (unary_op), type,
7895 make_tree (type, temp)),
7896 temp, 0);
7897 op1 = op0;
7898 }
7899 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7900 comparison operator. If we have one of these cases, set the
7901 output to A, branch on A (cse will merge these two references),
7902 then set the output to FOO. */
7903 else if (temp
7904 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7905 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7906 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7907 TREE_OPERAND (exp, 1), 0)
7908 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7909 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7910 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7911 {
7912 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7913 temp = gen_reg_rtx (mode);
7914 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7915 jumpif (TREE_OPERAND (exp, 0), op0);
7916
7917 start_cleanup_deferral ();
7918 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7919 op1 = op0;
7920 }
7921 else if (temp
7922 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7923 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7924 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7925 TREE_OPERAND (exp, 2), 0)
7926 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7927 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7928 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7929 {
7930 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7931 temp = gen_reg_rtx (mode);
7932 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7933 jumpifnot (TREE_OPERAND (exp, 0), op0);
7934
7935 start_cleanup_deferral ();
7936 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7937 op1 = op0;
7938 }
7939 else
7940 {
7941 op1 = gen_label_rtx ();
7942 jumpifnot (TREE_OPERAND (exp, 0), op0);
7943
7944 start_cleanup_deferral ();
7945
7946 /* One branch of the cond can be void, if it never returns. For
7947 example A ? throw : E. */
7948 if (temp != 0
7949 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7950 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7951 else
7952 expand_expr (TREE_OPERAND (exp, 1),
7953 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7954 end_cleanup_deferral ();
7955 emit_queue ();
7956 emit_jump_insn (gen_jump (op1));
7957 emit_barrier ();
7958 emit_label (op0);
7959 start_cleanup_deferral ();
7960 if (temp != 0
7961 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7962 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7963 else
7964 expand_expr (TREE_OPERAND (exp, 2),
7965 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7966 }
7967
7968 end_cleanup_deferral ();
7969
7970 emit_queue ();
7971 emit_label (op1);
7972 OK_DEFER_POP;
7973
7974 return temp;
7975 }
7976
7977 case TARGET_EXPR:
7978 {
7979 /* Something needs to be initialized, but we didn't know
7980 where that thing was when building the tree. For example,
7981 it could be the return value of a function, or a parameter
7982 to a function which is laid out on the stack, or a temporary
7983 variable which must be passed by reference.
7984
7985 We guarantee that the expression will either be constructed
7986 or copied into our original target. */
7987
7988 tree slot = TREE_OPERAND (exp, 0);
7989 tree cleanups = NULL_TREE;
7990 tree exp1;
7991
7992 if (TREE_CODE (slot) != VAR_DECL)
7993 abort ();
7994
7995 if (! ignore)
7996 target = original_target;
7997
7998 /* Set this here so that if we get a target that refers to a
7999 register variable that's already been used, put_reg_into_stack
8000 knows that it should fix up those uses. */
8001 TREE_USED (slot) = 1;
8002
8003 if (target == 0)
8004 {
8005 if (DECL_RTL (slot) != 0)
8006 {
8007 target = DECL_RTL (slot);
8008 /* If we have already expanded the slot, don't do
8009 it again. (mrs) */
8010 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8011 return target;
8012 }
8013 else
8014 {
8015 target = assign_temp (type, 2, 0, 1);
8016 /* All temp slots at this level must not conflict. */
8017 preserve_temp_slots (target);
8018 DECL_RTL (slot) = target;
8019 if (TREE_ADDRESSABLE (slot))
8020 {
8021 TREE_ADDRESSABLE (slot) = 0;
8022 mark_addressable (slot);
8023 }
8024
8025 /* Since SLOT is not known to the called function
8026 to belong to its stack frame, we must build an explicit
8027 cleanup. This case occurs when we must build up a reference
8028 to pass as an argument. In this case,
8029 it is very likely that such a reference need not be
8030 built here. */
8031
8032 if (TREE_OPERAND (exp, 2) == 0)
8033 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8034 cleanups = TREE_OPERAND (exp, 2);
8035 }
8036 }
8037 else
8038 {
8039 /* This case does occur when expanding a parameter which
8040 needs to be constructed on the stack. The target
8041 is the actual stack address that we want to initialize.
8042 The function we call will perform the cleanup in this case. */
8043
8044 /* If we have already assigned it space, use that space,
8045 not the target that we were passed in, as our target
8046 parameter is only a hint. */
8047 if (DECL_RTL (slot) != 0)
8048 {
8049 target = DECL_RTL (slot);
8050 /* If we have already expanded the slot, don't do
8051 it again. (mrs) */
8052 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8053 return target;
8054 }
8055 else
8056 {
8057 DECL_RTL (slot) = target;
8058 /* If we must have an addressable slot, then make sure that
8059 the RTL that we just stored in slot is OK. */
8060 if (TREE_ADDRESSABLE (slot))
8061 {
8062 TREE_ADDRESSABLE (slot) = 0;
8063 mark_addressable (slot);
8064 }
8065 }
8066 }
8067
8068 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8069 /* Mark it as expanded. */
8070 TREE_OPERAND (exp, 1) = NULL_TREE;
8071
8072 store_expr (exp1, target, 0);
8073
8074 expand_decl_cleanup (NULL_TREE, cleanups);
8075
8076 return target;
8077 }
8078
8079 case INIT_EXPR:
8080 {
8081 tree lhs = TREE_OPERAND (exp, 0);
8082 tree rhs = TREE_OPERAND (exp, 1);
8083 tree noncopied_parts = 0;
8084 tree lhs_type = TREE_TYPE (lhs);
8085
8086 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8087 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8088 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8089 TYPE_NONCOPIED_PARTS (lhs_type));
8090 while (noncopied_parts != 0)
8091 {
8092 expand_assignment (TREE_VALUE (noncopied_parts),
8093 TREE_PURPOSE (noncopied_parts), 0, 0);
8094 noncopied_parts = TREE_CHAIN (noncopied_parts);
8095 }
8096 return temp;
8097 }
8098
8099 case MODIFY_EXPR:
8100 {
8101 /* If lhs is complex, expand calls in rhs before computing it.
8102 That's so we don't compute a pointer and save it over a call.
8103 If lhs is simple, compute it first so we can give it as a
8104 target if the rhs is just a call. This avoids an extra temp and copy
8105 and that prevents a partial-subsumption which makes bad code.
8106 Actually we could treat component_ref's of vars like vars. */
8107
8108 tree lhs = TREE_OPERAND (exp, 0);
8109 tree rhs = TREE_OPERAND (exp, 1);
8110 tree noncopied_parts = 0;
8111 tree lhs_type = TREE_TYPE (lhs);
8112
8113 temp = 0;
8114
8115 if (TREE_CODE (lhs) != VAR_DECL
8116 && TREE_CODE (lhs) != RESULT_DECL
8117 && TREE_CODE (lhs) != PARM_DECL
8118 && ! (TREE_CODE (lhs) == INDIRECT_REF
8119 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8120 preexpand_calls (exp);
8121
8122 /* Check for |= or &= of a bitfield of size one into another bitfield
8123 of size 1. In this case, (unless we need the result of the
8124 assignment) we can do this more efficiently with a
8125 test followed by an assignment, if necessary.
8126
8127 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8128 things change so we do, this code should be enhanced to
8129 support it. */
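/* For example, for one-bit fields, 's.a |= s.b' is emitted as
   'if (s.b) s.a = 1;' and 's.a &= s.b' as 'if (! s.b) s.a = 0;'.  */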
8130 if (ignore
8131 && TREE_CODE (lhs) == COMPONENT_REF
8132 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8133 || TREE_CODE (rhs) == BIT_AND_EXPR)
8134 && TREE_OPERAND (rhs, 0) == lhs
8135 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8136 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
8137 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
8138 {
8139 rtx label = gen_label_rtx ();
8140
8141 do_jump (TREE_OPERAND (rhs, 1),
8142 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8143 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8144 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8145 (TREE_CODE (rhs) == BIT_IOR_EXPR
8146 ? integer_one_node
8147 : integer_zero_node)),
8148 0, 0);
8149 do_pending_stack_adjust ();
8150 emit_label (label);
8151 return const0_rtx;
8152 }
8153
8154 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8155 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8156 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8157 TYPE_NONCOPIED_PARTS (lhs_type));
8158
8159 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8160 while (noncopied_parts != 0)
8161 {
8162 expand_assignment (TREE_PURPOSE (noncopied_parts),
8163 TREE_VALUE (noncopied_parts), 0, 0);
8164 noncopied_parts = TREE_CHAIN (noncopied_parts);
8165 }
8166 return temp;
8167 }
8168
8169 case RETURN_EXPR:
8170 if (!TREE_OPERAND (exp, 0))
8171 expand_null_return ();
8172 else
8173 expand_return (TREE_OPERAND (exp, 0));
8174 return const0_rtx;
8175
8176 case PREINCREMENT_EXPR:
8177 case PREDECREMENT_EXPR:
8178 return expand_increment (exp, 0, ignore);
8179
8180 case POSTINCREMENT_EXPR:
8181 case POSTDECREMENT_EXPR:
8182 /* Faster to treat as pre-increment if result is not used. */
8183 return expand_increment (exp, ! ignore, ignore);
8184
8185 case ADDR_EXPR:
8186 /* If nonzero, TEMP will be set to the address of something that might
8187 be a MEM corresponding to a stack slot. */
8188 temp = 0;
8189
8190 /* Are we taking the address of a nested function? */
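/* If so, the address must point at a trampoline that loads the
   function's static chain before jumping to the real code, so the
   result can be called like an ordinary function pointer.  */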
8191 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8192 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8193 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8194 && ! TREE_STATIC (exp))
8195 {
8196 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8197 op0 = force_operand (op0, target);
8198 }
8199 /* If we are taking the address of something erroneous, just
8200 return a zero. */
8201 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8202 return const0_rtx;
8203 else
8204 {
8205 /* We make sure to pass const0_rtx down if we came in with
8206 ignore set, to avoid doing the cleanups twice for something. */
8207 op0 = expand_expr (TREE_OPERAND (exp, 0),
8208 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8209 (modifier == EXPAND_INITIALIZER
8210 ? modifier : EXPAND_CONST_ADDRESS));
8211
8212 /* If we are going to ignore the result, OP0 will have been set
8213 to const0_rtx, so just return it. Don't get confused and
8214 think we are taking the address of the constant. */
8215 if (ignore)
8216 return op0;
8217
8218 op0 = protect_from_queue (op0, 0);
8219
8220 /* We would like the object in memory. If it is a constant, we can
8221 have it be statically allocated into memory. For a non-constant,
8222 we need to allocate some memory and store the value into it. */
8223
8224 if (CONSTANT_P (op0))
8225 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8226 op0);
8227 else if (GET_CODE (op0) == MEM)
8228 {
8229 mark_temp_addr_taken (op0);
8230 temp = XEXP (op0, 0);
8231 }
8232
8233 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8234 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8235 {
8236 /* If this object is in a register, it must not
8237 be BLKmode. */
8238 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8239 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8240
8241 mark_temp_addr_taken (memloc);
8242 emit_move_insn (memloc, op0);
8243 op0 = memloc;
8244 }
8245
8246 if (GET_CODE (op0) != MEM)
8247 abort ();
8248
8249 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8250 {
8251 temp = XEXP (op0, 0);
8252 #ifdef POINTERS_EXTEND_UNSIGNED
8253 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8254 && mode == ptr_mode)
8255 temp = convert_memory_address (ptr_mode, temp);
8256 #endif
8257 return temp;
8258 }
8259
8260 op0 = force_operand (XEXP (op0, 0), target);
8261 }
8262
8263 if (flag_force_addr && GET_CODE (op0) != REG)
8264 op0 = force_reg (Pmode, op0);
8265
8266 if (GET_CODE (op0) == REG
8267 && ! REG_USERVAR_P (op0))
8268 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8269
8270 /* If we might have had a temp slot, add an equivalent address
8271 for it. */
8272 if (temp != 0)
8273 update_temp_slot_address (temp, op0);
8274
8275 #ifdef POINTERS_EXTEND_UNSIGNED
8276 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8277 && mode == ptr_mode)
8278 op0 = convert_memory_address (ptr_mode, op0);
8279 #endif
8280
8281 return op0;
8282
8283 case ENTRY_VALUE_EXPR:
8284 abort ();
8285
8286 /* COMPLEX type for Extended Pascal & Fortran */
8287 case COMPLEX_EXPR:
8288 {
8289 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8290 rtx insns;
8291
8292 /* Get the rtx code of the operands. */
8293 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8294 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8295
8296 if (! target)
8297 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8298
8299 start_sequence ();
8300
8301 /* Move the real (op0) and imaginary (op1) parts to their location. */
8302 emit_move_insn (gen_realpart (mode, target), op0);
8303 emit_move_insn (gen_imagpart (mode, target), op1);
8304
8305 insns = get_insns ();
8306 end_sequence ();
8307
8308 /* Complex construction should appear as a single unit. */
8309 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8310 each with a separate pseudo as destination.
8311 It's not correct for flow to treat them as a unit. */
8312 if (GET_CODE (target) != CONCAT)
8313 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8314 else
8315 emit_insns (insns);
8316
8317 return target;
8318 }
8319
8320 case REALPART_EXPR:
8321 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8322 return gen_realpart (mode, op0);
8323
8324 case IMAGPART_EXPR:
8325 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8326 return gen_imagpart (mode, op0);
8327
8328 case CONJ_EXPR:
8329 {
8330 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8331 rtx imag_t;
8332 rtx insns;
8333
8334 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8335
8336 if (! target)
8337 target = gen_reg_rtx (mode);
8338
8339 start_sequence ();
8340
8341 /* Store the realpart and the negated imagpart to target. */
8342 emit_move_insn (gen_realpart (partmode, target),
8343 gen_realpart (partmode, op0));
8344
8345 imag_t = gen_imagpart (partmode, target);
8346 temp = expand_unop (partmode, neg_optab,
8347 gen_imagpart (partmode, op0), imag_t, 0);
8348 if (temp != imag_t)
8349 emit_move_insn (imag_t, temp);
8350
8351 insns = get_insns ();
8352 end_sequence ();
8353
8354 /* Conjugate should appear as a single unit.
8355 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8356 each with a separate pseudo as destination.
8357 It's not correct for flow to treat them as a unit. */
8358 if (GET_CODE (target) != CONCAT)
8359 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8360 else
8361 emit_insns (insns);
8362
8363 return target;
8364 }
8365
8366 case TRY_CATCH_EXPR:
8367 {
8368 tree handler = TREE_OPERAND (exp, 1);
8369
8370 expand_eh_region_start ();
8371
8372 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8373
8374 expand_eh_region_end (handler);
8375
8376 return op0;
8377 }
8378
8379 case TRY_FINALLY_EXPR:
8380 {
8381 tree try_block = TREE_OPERAND (exp, 0);
8382 tree finally_block = TREE_OPERAND (exp, 1);
8383 rtx finally_label = gen_label_rtx ();
8384 rtx done_label = gen_label_rtx ();
8385 rtx return_link = gen_reg_rtx (Pmode);
8386 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8387 (tree) finally_label, (tree) return_link);
8388 TREE_SIDE_EFFECTS (cleanup) = 1;
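/* The finally block is emitted only once, as a small subroutine:
   every path that must run it first loads the place to resume into
   RETURN_LINK and jumps to FINALLY_LABEL; the finally code then
   returns with an indirect jump through RETURN_LINK.  */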
8389
8390 /* Start a new binding layer that will keep track of all cleanup
8391 actions to be performed. */
8392 expand_start_bindings (2);
8393
8394 target_temp_slot_level = temp_slot_level;
8395
8396 expand_decl_cleanup (NULL_TREE, cleanup);
8397 op0 = expand_expr (try_block, target, tmode, modifier);
8398
8399 preserve_temp_slots (op0);
8400 expand_end_bindings (NULL_TREE, 0, 0);
8401 emit_jump (done_label);
8402 emit_label (finally_label);
8403 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8404 emit_indirect_jump (return_link);
8405 emit_label (done_label);
8406 return op0;
8407 }
8408
8409 case GOTO_SUBROUTINE_EXPR:
8410 {
8411 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8412 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8413 rtx return_address = gen_label_rtx ();
8414 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8415 emit_jump (subr);
8416 emit_label (return_address);
8417 return const0_rtx;
8418 }
8419
8420 case POPDCC_EXPR:
8421 {
8422 rtx dcc = get_dynamic_cleanup_chain ();
8423 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8424 return const0_rtx;
8425 }
8426
8427 case POPDHC_EXPR:
8428 {
8429 rtx dhc = get_dynamic_handler_chain ();
8430 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8431 return const0_rtx;
8432 }
8433
8434 case VA_ARG_EXPR:
8435 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8436
8437 default:
8438 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8439 }
8440
8441 /* Here to do an ordinary binary operator, generating an instruction
8442 from the optab already placed in `this_optab'. */
8443 binop:
8444 preexpand_calls (exp);
8445 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8446 subtarget = 0;
8447 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8448 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8449 binop2:
8450 temp = expand_binop (mode, this_optab, op0, op1, target,
8451 unsignedp, OPTAB_LIB_WIDEN);
8452 if (temp == 0)
8453 abort ();
8454 return temp;
8455 }
8456 \f
8457 /* Similar to expand_expr, except that we don't specify a target, target
8458 mode, or modifier and we return the alignment of the inner type. This is
8459 used in cases where it is not necessary to align the result to the
8460 alignment of its type as long as we know the alignment of the result, for
8461 example for comparisons of BLKmode values. */
8462
8463 static rtx
8464 expand_expr_unaligned (exp, palign)
8465 register tree exp;
8466 int *palign;
8467 {
8468 register rtx op0;
8469 tree type = TREE_TYPE (exp);
8470 register enum machine_mode mode = TYPE_MODE (type);
8471
8472 /* Default the alignment we return to that of the type. */
8473 *palign = TYPE_ALIGN (type);
8474
8475 /* The only case in which we do anything special is if the resulting mode
8476 is BLKmode. */
8477 if (mode != BLKmode)
8478 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8479
8480 switch (TREE_CODE (exp))
8481 {
8482 case CONVERT_EXPR:
8483 case NOP_EXPR:
8484 case NON_LVALUE_EXPR:
8485 /* Conversions between BLKmode values don't change the underlying
8486 alignment or value. */
8487 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8488 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8489 break;
8490
8491 case ARRAY_REF:
8492 /* Much of the code for this case is copied directly from expand_expr.
8493 We need to duplicate it here because we will do something different
8494 in the fall-through case, so we need to handle the same exceptions
8495 it does. */
8496 {
8497 tree array = TREE_OPERAND (exp, 0);
8498 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8499 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8500 tree index = TREE_OPERAND (exp, 1);
8501 tree index_type = TREE_TYPE (index);
8502 HOST_WIDE_INT i;
8503
8504 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8505 abort ();
8506
8507 /* Optimize the special-case of a zero lower bound.
8508
8509 We convert the low_bound to sizetype to avoid some problems
8510 with constant folding. (E.g. suppose the lower bound is 1,
8511 and its mode is QI. Without the conversion, (ARRAY
8512 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8513 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
8514
8515 But sizetype isn't quite right either (especially if
8516 the lowbound is negative). FIXME */
8517
8518 if (! integer_zerop (low_bound))
8519 index = fold (build (MINUS_EXPR, index_type, index,
8520 convert (sizetype, low_bound)));
8521
8522 /* If this is a constant index into a constant array,
8523 just get the value from the array. Handle both the cases when
8524 we have an explicit constructor and when our operand is a variable
8525 that was declared const. */
8526
8527 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
8528 {
8529 if (TREE_CODE (index) == INTEGER_CST
8530 && TREE_INT_CST_HIGH (index) == 0)
8531 {
8532 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
8533
8534 i = TREE_INT_CST_LOW (index);
8535 while (elem && i--)
8536 elem = TREE_CHAIN (elem);
8537 if (elem)
8538 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8539 palign);
8540 }
8541 }
8542
8543 else if (optimize >= 1
8544 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8545 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8546 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8547 {
8548 if (TREE_CODE (index) == INTEGER_CST)
8549 {
8550 tree init = DECL_INITIAL (array);
8551
8552 i = TREE_INT_CST_LOW (index);
8553 if (TREE_CODE (init) == CONSTRUCTOR)
8554 {
8555 tree elem = CONSTRUCTOR_ELTS (init);
8556
8557 while (elem
8558 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
8559 elem = TREE_CHAIN (elem);
8560 if (elem)
8561 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8562 palign);
8563 }
8564 }
8565 }
8566 }
8567
8568 /* ... fall through ... */
8569
8570 case COMPONENT_REF:
8571 case BIT_FIELD_REF:
8572 /* If the operand is a CONSTRUCTOR, we can just extract the
8573 appropriate field if it is present. Don't do this if we have
8574 already written the data since we want to refer to that copy
8575 and varasm.c assumes that's what we'll do. */
8576 if (TREE_CODE (exp) != ARRAY_REF
8577 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8578 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8579 {
8580 tree elt;
8581
8582 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8583 elt = TREE_CHAIN (elt))
8584 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8585 /* Note that unlike the case in expand_expr, we know this is
8586 BLKmode and hence not an integer. */
8587 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8588 }
8589
8590 {
8591 enum machine_mode mode1;
8592 int bitsize;
8593 int bitpos;
8594 tree offset;
8595 int volatilep = 0;
8596 int alignment;
8597 int unsignedp;
8598 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8599 &mode1, &unsignedp, &volatilep,
8600 &alignment);
8601
8602 /* If we got back the original object, something is wrong. Perhaps
8603 we are evaluating an expression too early. In any event, don't
8604 infinitely recurse. */
8605 if (tem == exp)
8606 abort ();
8607
8608 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8609
8610 /* If this is a constant, put it into a register if it is a
8611 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8612 if (CONSTANT_P (op0))
8613 {
8614 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8615
8616 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8617 && offset == 0)
8618 op0 = force_reg (inner_mode, op0);
8619 else
8620 op0 = validize_mem (force_const_mem (inner_mode, op0));
8621 }
8622
8623 if (offset != 0)
8624 {
8625 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8626
8627 /* If this object is in a register, put it into memory.
8628 This case can't occur in C, but can in Ada if we have
8629 unchecked conversion of an expression from a scalar type to
8630 an array or record type. */
8631 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8632 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8633 {
8634 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8635
8636 mark_temp_addr_taken (memloc);
8637 emit_move_insn (memloc, op0);
8638 op0 = memloc;
8639 }
8640
8641 if (GET_CODE (op0) != MEM)
8642 abort ();
8643
8644 if (GET_MODE (offset_rtx) != ptr_mode)
8645 {
8646 #ifdef POINTERS_EXTEND_UNSIGNED
8647 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8648 #else
8649 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8650 #endif
8651 }
8652
8653 op0 = change_address (op0, VOIDmode,
8654 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8655 force_reg (ptr_mode,
8656 offset_rtx)));
8657 }
8658
8659 /* Don't forget about volatility even if this is a bitfield. */
8660 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8661 {
8662 op0 = copy_rtx (op0);
8663 MEM_VOLATILE_P (op0) = 1;
8664 }
8665
8666 /* Check the access. */
8667 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8668 {
8669 rtx to;
8670 int size;
8671
8672 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8673 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8674
8675 /* Check the access rights of the pointer. */
8676 if (size > BITS_PER_UNIT)
8677 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8678 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8679 TYPE_MODE (sizetype),
8680 GEN_INT (MEMORY_USE_RO),
8681 TYPE_MODE (integer_type_node));
8682 }
8683
8684 /* In cases where an aligned union has an unaligned object
8685 as a field, we might be extracting a BLKmode value from
8686 an integer-mode (e.g., SImode) object. Handle this case
8687 by doing the extract into an object as wide as the field
8688 (which we know to be the width of a basic mode), then
8689 storing into memory, and changing the mode to BLKmode.
8690 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8691 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8692 if (mode1 == VOIDmode
8693 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8694 || (SLOW_UNALIGNED_ACCESS
8695 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8696 || bitpos % TYPE_ALIGN (type) != 0)))
8697 {
8698 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8699
8700 if (ext_mode == BLKmode)
8701 {
8702 /* In this case, BITPOS must start at a byte boundary. */
8703 if (GET_CODE (op0) != MEM
8704 || bitpos % BITS_PER_UNIT != 0)
8705 abort ();
8706
8707 op0 = change_address (op0, VOIDmode,
8708 plus_constant (XEXP (op0, 0),
8709 bitpos / BITS_PER_UNIT));
8710 }
8711 else
8712 {
8713 rtx new = assign_stack_temp (ext_mode,
8714 bitsize / BITS_PER_UNIT, 0);
8715
8716 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8717 unsignedp, NULL_RTX, ext_mode,
8718 ext_mode, alignment,
8719 int_size_in_bytes (TREE_TYPE (tem)));
8720
8721 /* If the result is a record type and BITSIZE is narrower than
8722 the mode of OP0, an integral mode, and this is a big endian
8723 machine, we must put the field into the high-order bits. */
8724 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8725 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8726 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8727 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8728 size_int (GET_MODE_BITSIZE
8729 (GET_MODE (op0))
8730 - bitsize),
8731 op0, 1);
8732
8733
8734 emit_move_insn (new, op0);
8735 op0 = copy_rtx (new);
8736 PUT_MODE (op0, BLKmode);
8737 }
8738 }
8739 else
8740 /* Get a reference to just this component. */
8741 op0 = change_address (op0, mode1,
8742 plus_constant (XEXP (op0, 0),
8743 (bitpos / BITS_PER_UNIT)));
8744
8745 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8746
8747 /* Adjust the alignment in case the bit position is not
8748 a multiple of the alignment of the inner object. */
8749 while (bitpos % alignment != 0)
8750 alignment >>= 1;
8751
8752 if (GET_CODE (XEXP (op0, 0)) == REG)
8753 mark_reg_pointer (XEXP (op0, 0), alignment);
8754
8755 MEM_IN_STRUCT_P (op0) = 1;
8756 MEM_VOLATILE_P (op0) |= volatilep;
8757
8758 *palign = alignment;
8759 return op0;
8760 }
8761
8762 default:
8763 break;
8764
8765 }
8766
8767 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8768 }
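/* Illustrative example (added sketch, not part of the original commentary):
   the constant-index path in the ARRAY_REF case above lets a reference such as

     static const int tbl[3] = { 10, 20, 30 };
     int f (void) { return tbl[1]; }

   often be satisfied by walking CONSTRUCTOR_ELTS of DECL_INITIAL (tbl) and
   expanding the matching element directly, provided the index is an
   INTEGER_CST, the array is TREE_READONLY without side effects, and we are
   optimizing.  */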
8769 \f
8770 /* Return the tree node and offset if a given argument corresponds to
8771 a string constant. */
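/* Illustrative example (added; the exact tree shape is an assumption about
   the front end, not something stated here): for a call such as

     __builtin_strlen ("hello" + 2)

   the argument is roughly (PLUS_EXPR (ADDR_EXPR <STRING_CST "hello">) 2),
   so the code below returns the STRING_CST and sets *PTR_OFFSET to the tree
   for 2; a bare string literal yields an offset of integer_zero_node.  */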
8772
8773 tree
8774 string_constant (arg, ptr_offset)
8775 tree arg;
8776 tree *ptr_offset;
8777 {
8778 STRIP_NOPS (arg);
8779
8780 if (TREE_CODE (arg) == ADDR_EXPR
8781 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8782 {
8783 *ptr_offset = integer_zero_node;
8784 return TREE_OPERAND (arg, 0);
8785 }
8786 else if (TREE_CODE (arg) == PLUS_EXPR)
8787 {
8788 tree arg0 = TREE_OPERAND (arg, 0);
8789 tree arg1 = TREE_OPERAND (arg, 1);
8790
8791 STRIP_NOPS (arg0);
8792 STRIP_NOPS (arg1);
8793
8794 if (TREE_CODE (arg0) == ADDR_EXPR
8795 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8796 {
8797 *ptr_offset = arg1;
8798 return TREE_OPERAND (arg0, 0);
8799 }
8800 else if (TREE_CODE (arg1) == ADDR_EXPR
8801 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8802 {
8803 *ptr_offset = arg0;
8804 return TREE_OPERAND (arg1, 0);
8805 }
8806 }
8807
8808 return 0;
8809 }
8810 \f
8811 /* Expand code for a post- or pre-increment or decrement
8812 and return the RTX for the result.
8813 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
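/* Illustrative example (added sketch): for

     int f (int *p) { return (*p)++; }

   POST is 1, so the code below either queues a single add insn on *p and
   arranges for the not-yet-incremented value to be returned, or, when that
   is not possible, copies the old value of *p into a pseudo, stores the
   incremented value back, and returns the copy.  */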
8814
8815 static rtx
8816 expand_increment (exp, post, ignore)
8817 register tree exp;
8818 int post, ignore;
8819 {
8820 register rtx op0, op1;
8821 register rtx temp, value;
8822 register tree incremented = TREE_OPERAND (exp, 0);
8823 optab this_optab = add_optab;
8824 int icode;
8825 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8826 int op0_is_copy = 0;
8827 int single_insn = 0;
8828 /* 1 means we can't store into OP0 directly,
8829 because it is a subreg narrower than a word,
8830 and we don't dare clobber the rest of the word. */
8831 int bad_subreg = 0;
8832
8833 /* Stabilize any component ref that might need to be
8834 evaluated more than once below. */
8835 if (!post
8836 || TREE_CODE (incremented) == BIT_FIELD_REF
8837 || (TREE_CODE (incremented) == COMPONENT_REF
8838 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8839 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8840 incremented = stabilize_reference (incremented);
8841 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8842 ones into save exprs so that they don't accidentally get evaluated
8843 more than once by the code below. */
8844 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8845 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8846 incremented = save_expr (incremented);
8847
8848 /* Compute the operands as RTX.
8849 Note whether OP0 is the actual lvalue or a copy of it:
8850 I believe it is a copy iff it is a register or subreg
8851 and insns were generated in computing it. */
8852
8853 temp = get_last_insn ();
8854 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8855
8856 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8857 in place but instead must do sign- or zero-extension during assignment,
8858 so we copy it into a new register and let the code below use it as
8859 a copy.
8860
8861 Note that we can safely modify this SUBREG since it is known not to be
8862 shared (it was made by the expand_expr call above). */
8863
8864 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8865 {
8866 if (post)
8867 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8868 else
8869 bad_subreg = 1;
8870 }
8871 else if (GET_CODE (op0) == SUBREG
8872 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8873 {
8874 /* We cannot increment this SUBREG in place. If we are
8875 post-incrementing, get a copy of the old value. Otherwise,
8876 just mark that we cannot increment in place. */
8877 if (post)
8878 op0 = copy_to_reg (op0);
8879 else
8880 bad_subreg = 1;
8881 }
8882
8883 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8884 && temp != get_last_insn ());
8885 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8886 EXPAND_MEMORY_USE_BAD);
8887
8888 /* Decide whether incrementing or decrementing. */
8889 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8890 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8891 this_optab = sub_optab;
8892
8893 /* Convert decrement by a constant into a negative increment. */
8894 if (this_optab == sub_optab
8895 && GET_CODE (op1) == CONST_INT)
8896 {
8897 op1 = GEN_INT (- INTVAL (op1));
8898 this_optab = add_optab;
8899 }
8900
8901 /* For a preincrement, see if we can do this with a single instruction. */
8902 if (!post)
8903 {
8904 icode = (int) this_optab->handlers[(int) mode].insn_code;
8905 if (icode != (int) CODE_FOR_nothing
8906 /* Make sure that OP0 is valid for operands 0 and 1
8907 of the insn we want to queue. */
8908 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8909 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8910 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8911 single_insn = 1;
8912 }
8913
8914 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8915 then we cannot just increment OP0. We must therefore contrive to
8916 increment the original value. Then, for postincrement, we can return
8917 OP0 since it is a copy of the old value. For preincrement, expand here
8918 unless we can do it with a single insn.
8919
8920 Likewise if storing directly into OP0 would clobber high bits
8921 we need to preserve (bad_subreg). */
8922 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8923 {
8924 /* This is the easiest way to increment the value wherever it is.
8925 Problems with multiple evaluation of INCREMENTED are prevented
8926 because either (1) it is a component_ref or preincrement,
8927 in which case it was stabilized above, or (2) it is an array_ref
8928 with constant index in an array in a register, which is
8929 safe to reevaluate. */
8930 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8931 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8932 ? MINUS_EXPR : PLUS_EXPR),
8933 TREE_TYPE (exp),
8934 incremented,
8935 TREE_OPERAND (exp, 1));
8936
8937 while (TREE_CODE (incremented) == NOP_EXPR
8938 || TREE_CODE (incremented) == CONVERT_EXPR)
8939 {
8940 newexp = convert (TREE_TYPE (incremented), newexp);
8941 incremented = TREE_OPERAND (incremented, 0);
8942 }
8943
8944 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8945 return post ? op0 : temp;
8946 }
8947
8948 if (post)
8949 {
8950 /* We have a true reference to the value in OP0.
8951 If there is an insn to add or subtract in this mode, queue it.
8952 Queueing the increment insn avoids the register shuffling
8953 that often results if we must increment now and first save
8954 the old value for subsequent use. */
8955
8956 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8957 op0 = stabilize (op0);
8958 #endif
8959
8960 icode = (int) this_optab->handlers[(int) mode].insn_code;
8961 if (icode != (int) CODE_FOR_nothing
8962 /* Make sure that OP0 is valid for operands 0 and 1
8963 of the insn we want to queue. */
8964 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8965 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8966 {
8967 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8968 op1 = force_reg (mode, op1);
8969
8970 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8971 }
8972 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8973 {
8974 rtx addr = (general_operand (XEXP (op0, 0), mode)
8975 ? force_reg (Pmode, XEXP (op0, 0))
8976 : copy_to_reg (XEXP (op0, 0)));
8977 rtx temp, result;
8978
8979 op0 = change_address (op0, VOIDmode, addr);
8980 temp = force_reg (GET_MODE (op0), op0);
8981 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8982 op1 = force_reg (mode, op1);
8983
8984 /* The increment queue is LIFO, thus we have to `queue'
8985 the instructions in reverse order. */
8986 enqueue_insn (op0, gen_move_insn (op0, temp));
8987 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8988 return result;
8989 }
8990 }
8991
8992 /* Preincrement, or we can't increment with one simple insn. */
8993 if (post)
8994 /* Save a copy of the value before inc or dec, to return it later. */
8995 temp = value = copy_to_reg (op0);
8996 else
8997 /* Arrange to return the incremented value. */
8998 /* Copy the rtx because expand_binop will protect from the queue,
8999 and the results of that would be invalid for us to return
9000 if our caller does emit_queue before using our result. */
9001 temp = copy_rtx (value = op0);
9002
9003 /* Increment however we can. */
9004 op1 = expand_binop (mode, this_optab, value, op1,
9005 current_function_check_memory_usage ? NULL_RTX : op0,
9006 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9007 /* Make sure the value is stored into OP0. */
9008 if (op1 != op0)
9009 emit_move_insn (op0, op1);
9010
9011 return temp;
9012 }
9013 \f
9014 /* Expand all function calls contained within EXP, innermost ones first.
9015 But don't look within expressions that have sequence points.
9016 For each CALL_EXPR, record the rtx for its value
9017 in the CALL_EXPR_RTL field. */
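/* Illustrative example (added sketch): in

     int g (void) { return f (1) + f (2); }

   both CALL_EXPRs are expanded here before the PLUS_EXPR itself is, and
   their result rtx is remembered in CALL_EXPR_RTL; calls returning
   variable-sized objects and built-in functions are deliberately skipped
   below.  */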
9018
9019 static void
9020 preexpand_calls (exp)
9021 tree exp;
9022 {
9023 register int nops, i;
9024 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9025
9026 if (! do_preexpand_calls)
9027 return;
9028
9029 /* Only expressions and references can contain calls. */
9030
9031 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9032 return;
9033
9034 switch (TREE_CODE (exp))
9035 {
9036 case CALL_EXPR:
9037 /* Do nothing if already expanded. */
9038 if (CALL_EXPR_RTL (exp) != 0
9039 /* Do nothing if the call returns a variable-sized object. */
9040 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9041 /* Do nothing to built-in functions. */
9042 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9043 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9044 == FUNCTION_DECL)
9045 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9046 return;
9047
9048 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9049 return;
9050
9051 case COMPOUND_EXPR:
9052 case COND_EXPR:
9053 case TRUTH_ANDIF_EXPR:
9054 case TRUTH_ORIF_EXPR:
9055 /* If we find one of these, then we can be sure
9056 the stack adjustment will be done for it (since it makes jumps).
9057 Do it now, so that if this is inside an argument
9058 of a function, we don't get the stack adjustment
9059 after some other args have already been pushed. */
9060 do_pending_stack_adjust ();
9061 return;
9062
9063 case BLOCK:
9064 case RTL_EXPR:
9065 case WITH_CLEANUP_EXPR:
9066 case CLEANUP_POINT_EXPR:
9067 case TRY_CATCH_EXPR:
9068 return;
9069
9070 case SAVE_EXPR:
9071 if (SAVE_EXPR_RTL (exp) != 0)
9072 return;
9073
9074 default:
9075 break;
9076 }
9077
9078 nops = tree_code_length[(int) TREE_CODE (exp)];
9079 for (i = 0; i < nops; i++)
9080 if (TREE_OPERAND (exp, i) != 0)
9081 {
9082 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9083 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9084 It doesn't happen before the call is made. */
9085 ;
9086 else
9087 {
9088 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9089 if (type == 'e' || type == '<' || type == '1' || type == '2'
9090 || type == 'r')
9091 preexpand_calls (TREE_OPERAND (exp, i));
9092 }
9093 }
9094 }
9095 \f
9096 /* At the start of a function, record that we have no previously-pushed
9097 arguments waiting to be popped. */
9098
9099 void
9100 init_pending_stack_adjust ()
9101 {
9102 pending_stack_adjust = 0;
9103 }
9104
9105 /* When exiting from a function, if safe, clear out any pending stack adjust
9106 so the adjustment won't get done.
9107
9108 Note, if the current function calls alloca, then it must have a
9109 frame pointer regardless of the value of flag_omit_frame_pointer. */
9110
9111 void
9112 clear_pending_stack_adjust ()
9113 {
9114 #ifdef EXIT_IGNORE_STACK
9115 if (optimize > 0
9116 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9117 && EXIT_IGNORE_STACK
9118 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9119 && ! flag_inline_functions)
9120 pending_stack_adjust = 0;
9121 #endif
9122 }
9123
9124 /* Pop any previously-pushed arguments that have not been popped yet. */
9125
9126 void
9127 do_pending_stack_adjust ()
9128 {
9129 if (inhibit_defer_pop == 0)
9130 {
9131 if (pending_stack_adjust != 0)
9132 adjust_stack (GEN_INT (pending_stack_adjust));
9133 pending_stack_adjust = 0;
9134 }
9135 }
9136 \f
9137 /* Expand conditional expressions. */
9138
9139 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9140 LABEL is an rtx of code CODE_LABEL, in this function and all the
9141 functions here. */
9142
9143 void
9144 jumpifnot (exp, label)
9145 tree exp;
9146 rtx label;
9147 {
9148 do_jump (exp, label, NULL_RTX);
9149 }
9150
9151 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9152
9153 void
9154 jumpif (exp, label)
9155 tree exp;
9156 rtx label;
9157 {
9158 do_jump (exp, NULL_RTX, label);
9159 }
9160
9161 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9162 the result is zero, or IF_TRUE_LABEL if the result is one.
9163 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9164 meaning fall through in that case.
9165
9166 do_jump always does any pending stack adjust except when it does not
9167 actually perform a jump. An example where there is no jump
9168 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9169
9170 This function is responsible for optimizing cases such as
9171 &&, || and comparison operators in EXP. */
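/* Illustrative example (added sketch): for

     if (a && b)
       stmt;

   the TRUTH_ANDIF_EXPR case below jumps to IF_FALSE_LABEL as soon as A is
   known to be zero and only then evaluates B, matching C's short-circuit
   semantics; a comparison such as A < B is routed to do_compare_and_jump
   rather than being computed as a value and then tested.  */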
9172
9173 void
9174 do_jump (exp, if_false_label, if_true_label)
9175 tree exp;
9176 rtx if_false_label, if_true_label;
9177 {
9178 register enum tree_code code = TREE_CODE (exp);
9179 /* Some cases need to create a label to jump to
9180 in order to properly fall through.
9181 These cases set DROP_THROUGH_LABEL nonzero. */
9182 rtx drop_through_label = 0;
9183 rtx temp;
9184 int i;
9185 tree type;
9186 enum machine_mode mode;
9187
9188 #ifdef MAX_INTEGER_COMPUTATION_MODE
9189 check_max_integer_computation_mode (exp);
9190 #endif
9191
9192 emit_queue ();
9193
9194 switch (code)
9195 {
9196 case ERROR_MARK:
9197 break;
9198
9199 case INTEGER_CST:
9200 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9201 if (temp)
9202 emit_jump (temp);
9203 break;
9204
9205 #if 0
9206 /* This is not true with #pragma weak */
9207 case ADDR_EXPR:
9208 /* The address of something can never be zero. */
9209 if (if_true_label)
9210 emit_jump (if_true_label);
9211 break;
9212 #endif
9213
9214 case NOP_EXPR:
9215 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9216 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9217 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9218 goto normal;
9219 case CONVERT_EXPR:
9220 /* If we are narrowing the operand, we have to do the compare in the
9221 narrower mode. */
9222 if ((TYPE_PRECISION (TREE_TYPE (exp))
9223 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9224 goto normal;
9225 case NON_LVALUE_EXPR:
9226 case REFERENCE_EXPR:
9227 case ABS_EXPR:
9228 case NEGATE_EXPR:
9229 case LROTATE_EXPR:
9230 case RROTATE_EXPR:
9231 /* These cannot change zero->non-zero or vice versa. */
9232 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9233 break;
9234
9235 case WITH_RECORD_EXPR:
9236 /* Put the object on the placeholder list, recurse through our first
9237 operand, and pop the list. */
9238 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9239 placeholder_list);
9240 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9241 placeholder_list = TREE_CHAIN (placeholder_list);
9242 break;
9243
9244 #if 0
9245 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9246 a test and can be longer if the test is eliminated. */
9247 case PLUS_EXPR:
9248 /* Reduce to minus. */
9249 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9250 TREE_OPERAND (exp, 0),
9251 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9252 TREE_OPERAND (exp, 1))));
9253 /* Process as MINUS. */
9254 #endif
9255
9256 case MINUS_EXPR:
9257 /* Non-zero iff operands of minus differ. */
9258 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9259 TREE_OPERAND (exp, 0),
9260 TREE_OPERAND (exp, 1)),
9261 NE, NE, if_false_label, if_true_label);
9262 break;
9263
9264 case BIT_AND_EXPR:
9265 /* If we are AND'ing with a small constant, do this comparison in the
9266 smallest type that fits. If the machine doesn't have comparisons
9267 that small, it will be converted back to the wider comparison.
9268 This helps if we are testing the sign bit of a narrower object.
9269 combine can't do this for us because it can't know whether a
9270 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9271
9272 if (! SLOW_BYTE_ACCESS
9273 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9274 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9275 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9276 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9277 && (type = type_for_mode (mode, 1)) != 0
9278 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9279 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9280 != CODE_FOR_nothing))
9281 {
9282 do_jump (convert (type, exp), if_false_label, if_true_label);
9283 break;
9284 }
9285 goto normal;
9286
9287 case TRUTH_NOT_EXPR:
9288 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9289 break;
9290
9291 case TRUTH_ANDIF_EXPR:
9292 if (if_false_label == 0)
9293 if_false_label = drop_through_label = gen_label_rtx ();
9294 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9295 start_cleanup_deferral ();
9296 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9297 end_cleanup_deferral ();
9298 break;
9299
9300 case TRUTH_ORIF_EXPR:
9301 if (if_true_label == 0)
9302 if_true_label = drop_through_label = gen_label_rtx ();
9303 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9304 start_cleanup_deferral ();
9305 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9306 end_cleanup_deferral ();
9307 break;
9308
9309 case COMPOUND_EXPR:
9310 push_temp_slots ();
9311 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9312 preserve_temp_slots (NULL_RTX);
9313 free_temp_slots ();
9314 pop_temp_slots ();
9315 emit_queue ();
9316 do_pending_stack_adjust ();
9317 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9318 break;
9319
9320 case COMPONENT_REF:
9321 case BIT_FIELD_REF:
9322 case ARRAY_REF:
9323 {
9324 int bitsize, bitpos, unsignedp;
9325 enum machine_mode mode;
9326 tree type;
9327 tree offset;
9328 int volatilep = 0;
9329 int alignment;
9330
9331 /* Get description of this reference. We don't actually care
9332 about the underlying object here. */
9333 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9334 &mode, &unsignedp, &volatilep,
9335 &alignment);
9336
9337 type = type_for_size (bitsize, unsignedp);
9338 if (! SLOW_BYTE_ACCESS
9339 && type != 0 && bitsize >= 0
9340 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9341 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9342 != CODE_FOR_nothing))
9343 {
9344 do_jump (convert (type, exp), if_false_label, if_true_label);
9345 break;
9346 }
9347 goto normal;
9348 }
9349
9350 case COND_EXPR:
9351 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9352 if (integer_onep (TREE_OPERAND (exp, 1))
9353 && integer_zerop (TREE_OPERAND (exp, 2)))
9354 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9355
9356 else if (integer_zerop (TREE_OPERAND (exp, 1))
9357 && integer_onep (TREE_OPERAND (exp, 2)))
9358 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9359
9360 else
9361 {
9362 register rtx label1 = gen_label_rtx ();
9363 drop_through_label = gen_label_rtx ();
9364
9365 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9366
9367 start_cleanup_deferral ();
9368 /* Now the THEN-expression. */
9369 do_jump (TREE_OPERAND (exp, 1),
9370 if_false_label ? if_false_label : drop_through_label,
9371 if_true_label ? if_true_label : drop_through_label);
9372 /* In case the do_jump just above never jumps. */
9373 do_pending_stack_adjust ();
9374 emit_label (label1);
9375
9376 /* Now the ELSE-expression. */
9377 do_jump (TREE_OPERAND (exp, 2),
9378 if_false_label ? if_false_label : drop_through_label,
9379 if_true_label ? if_true_label : drop_through_label);
9380 end_cleanup_deferral ();
9381 }
9382 break;
9383
9384 case EQ_EXPR:
9385 {
9386 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9387
9388 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9389 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9390 {
9391 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9392 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9393 do_jump
9394 (fold
9395 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9396 fold (build (EQ_EXPR, TREE_TYPE (exp),
9397 fold (build1 (REALPART_EXPR,
9398 TREE_TYPE (inner_type),
9399 exp0)),
9400 fold (build1 (REALPART_EXPR,
9401 TREE_TYPE (inner_type),
9402 exp1)))),
9403 fold (build (EQ_EXPR, TREE_TYPE (exp),
9404 fold (build1 (IMAGPART_EXPR,
9405 TREE_TYPE (inner_type),
9406 exp0)),
9407 fold (build1 (IMAGPART_EXPR,
9408 TREE_TYPE (inner_type),
9409 exp1)))))),
9410 if_false_label, if_true_label);
9411 }
9412
9413 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9414 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9415
9416 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9417 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
9418 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9419 else
9420 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9421 break;
9422 }
9423
9424 case NE_EXPR:
9425 {
9426 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9427
9428 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9429 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9430 {
9431 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9432 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9433 do_jump
9434 (fold
9435 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9436 fold (build (NE_EXPR, TREE_TYPE (exp),
9437 fold (build1 (REALPART_EXPR,
9438 TREE_TYPE (inner_type),
9439 exp0)),
9440 fold (build1 (REALPART_EXPR,
9441 TREE_TYPE (inner_type),
9442 exp1)))),
9443 fold (build (NE_EXPR, TREE_TYPE (exp),
9444 fold (build1 (IMAGPART_EXPR,
9445 TREE_TYPE (inner_type),
9446 exp0)),
9447 fold (build1 (IMAGPART_EXPR,
9448 TREE_TYPE (inner_type),
9449 exp1)))))),
9450 if_false_label, if_true_label);
9451 }
9452
9453 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9454 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9455
9456 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9457 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
9458 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9459 else
9460 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9461 break;
9462 }
9463
9464 case LT_EXPR:
9465 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9466 if (GET_MODE_CLASS (mode) == MODE_INT
9467 && ! can_compare_p (mode, ccp_jump))
9468 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9469 else
9470 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9471 break;
9472
9473 case LE_EXPR:
9474 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9475 if (GET_MODE_CLASS (mode) == MODE_INT
9476 && ! can_compare_p (mode, ccp_jump))
9477 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9478 else
9479 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9480 break;
9481
9482 case GT_EXPR:
9483 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9484 if (GET_MODE_CLASS (mode) == MODE_INT
9485 && ! can_compare_p (mode, ccp_jump))
9486 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9487 else
9488 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9489 break;
9490
9491 case GE_EXPR:
9492 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9493 if (GET_MODE_CLASS (mode) == MODE_INT
9494 && ! can_compare_p (mode, ccp_jump))
9495 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9496 else
9497 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9498 break;
9499
9500 default:
9501 normal:
9502 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9503 #if 0
9504 /* This is not needed any more and causes poor code since it causes
9505 comparisons and tests from non-SI objects to have different code
9506 sequences. */
9507 /* Copy to register to avoid generating bad insns by cse
9508 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9509 if (!cse_not_expected && GET_CODE (temp) == MEM)
9510 temp = copy_to_reg (temp);
9511 #endif
9512 do_pending_stack_adjust ();
9513 /* Do any postincrements in the expression that was tested. */
9514 emit_queue ();
9515
9516 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9517 {
9518 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9519 if (target)
9520 emit_jump (target);
9521 }
9522 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9523 && ! can_compare_p (GET_MODE (temp), ccp_jump))
9524 /* Note swapping the labels gives us not-equal. */
9525 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9526 else if (GET_MODE (temp) != VOIDmode)
9527 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9528 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9529 GET_MODE (temp), NULL_RTX, 0,
9530 if_false_label, if_true_label);
9531 else
9532 abort ();
9533 }
9534
9535 if (drop_through_label)
9536 {
9537 /* If do_jump produces code that might be jumped around,
9538 do any stack adjusts from that code, before the place
9539 where control merges in. */
9540 do_pending_stack_adjust ();
9541 emit_label (drop_through_label);
9542 }
9543 }
9544 \f
9545 /* Given a comparison expression EXP for values too wide to be compared
9546 with one insn, test the comparison and jump to the appropriate label.
9547 The code of EXP is ignored; we always test GT if SWAP is 0,
9548 and LT if SWAP is 1. */
9549
9550 static void
9551 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9552 tree exp;
9553 int swap;
9554 rtx if_false_label, if_true_label;
9555 {
9556 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9557 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9558 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9559 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9560
9561 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9562 }
9563
9564 /* Compare OP0 with OP1, a word at a time, in mode MODE.
9565 UNSIGNEDP says to do unsigned comparison.
9566 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
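/* Illustrative sketch (added): for a DImode comparison on a 32-bit target
   the loop below emits, high-order word first,

     if (op0_hi > op1_hi) goto if_true_label;
     if (op0_hi != op1_hi) goto if_false_label;
     if (op0_lo > op1_lo) goto if_true_label;
     goto if_false_label;

   where the final word comparison is unsigned, so the low-order words are
   only consulted when the high-order words are equal.  */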
9567
9568 void
9569 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9570 enum machine_mode mode;
9571 int unsignedp;
9572 rtx op0, op1;
9573 rtx if_false_label, if_true_label;
9574 {
9575 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9576 rtx drop_through_label = 0;
9577 int i;
9578
9579 if (! if_true_label || ! if_false_label)
9580 drop_through_label = gen_label_rtx ();
9581 if (! if_true_label)
9582 if_true_label = drop_through_label;
9583 if (! if_false_label)
9584 if_false_label = drop_through_label;
9585
9586 /* Compare a word at a time, high order first. */
9587 for (i = 0; i < nwords; i++)
9588 {
9589 rtx op0_word, op1_word;
9590
9591 if (WORDS_BIG_ENDIAN)
9592 {
9593 op0_word = operand_subword_force (op0, i, mode);
9594 op1_word = operand_subword_force (op1, i, mode);
9595 }
9596 else
9597 {
9598 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9599 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9600 }
9601
9602 /* All but the high-order word must be compared as unsigned. */
9603 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9604 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9605 NULL_RTX, if_true_label);
9606
9607 /* Consider lower words only if these are equal. */
9608 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9609 NULL_RTX, 0, NULL_RTX, if_false_label);
9610 }
9611
9612 if (if_false_label)
9613 emit_jump (if_false_label);
9614 if (drop_through_label)
9615 emit_label (drop_through_label);
9616 }
9617
9618 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9619 with one insn, test the comparison and jump to the appropriate label. */
9620
9621 static void
9622 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9623 tree exp;
9624 rtx if_false_label, if_true_label;
9625 {
9626 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9627 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9628 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9629 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9630 int i;
9631 rtx drop_through_label = 0;
9632
9633 if (! if_false_label)
9634 drop_through_label = if_false_label = gen_label_rtx ();
9635
9636 for (i = 0; i < nwords; i++)
9637 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9638 operand_subword_force (op1, i, mode),
9639 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9640 word_mode, NULL_RTX, 0, if_false_label,
9641 NULL_RTX);
9642
9643 if (if_true_label)
9644 emit_jump (if_true_label);
9645 if (drop_through_label)
9646 emit_label (drop_through_label);
9647 }
9648 \f
9649 /* Jump according to whether OP0 is 0.
9650 We assume that OP0 has an integer mode that is too wide
9651 for the available compare insns. */
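/* Illustrative sketch (added): for a DImode OP0 on a 32-bit target the
   preferred form built below is

     part = op0_lo | op0_hi;
     if (part == 0) goto if_true_label;
     goto if_false_label;

   i.e. one IOR in word_mode plus a single compare; the word-by-word series
   of compares is only used when the IOR cannot be formed.  */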
9652
9653 void
9654 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9655 rtx op0;
9656 rtx if_false_label, if_true_label;
9657 {
9658 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9659 rtx part;
9660 int i;
9661 rtx drop_through_label = 0;
9662
9663 /* The fastest way of doing this comparison on almost any machine is to
9664 "or" all the words and compare the result. If all have to be loaded
9665 from memory and this is a very wide item, it's possible this may
9666 be slower, but that's highly unlikely. */
9667
9668 part = gen_reg_rtx (word_mode);
9669 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9670 for (i = 1; i < nwords && part != 0; i++)
9671 part = expand_binop (word_mode, ior_optab, part,
9672 operand_subword_force (op0, i, GET_MODE (op0)),
9673 part, 1, OPTAB_WIDEN);
9674
9675 if (part != 0)
9676 {
9677 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9678 NULL_RTX, 0, if_false_label, if_true_label);
9679
9680 return;
9681 }
9682
9683 /* If we couldn't do the "or" simply, do this with a series of compares. */
9684 if (! if_false_label)
9685 drop_through_label = if_false_label = gen_label_rtx ();
9686
9687 for (i = 0; i < nwords; i++)
9688 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9689 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9690 if_false_label, NULL_RTX);
9691
9692 if (if_true_label)
9693 emit_jump (if_true_label);
9694
9695 if (drop_through_label)
9696 emit_label (drop_through_label);
9697 }
9698 \f
9699 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9700 (including code to compute the values to be compared)
9701 and set (CC0) according to the result.
9702 The decision as to signed or unsigned comparison must be made by the caller.
9703
9704 We force a stack adjustment unless there are currently
9705 things pushed on the stack that aren't yet used.
9706
9707 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9708 compared.
9709
9710 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9711 size of MODE should be used. */
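/* Illustrative usage (added sketch; the caller shown is hypothetical): a
   typical cc0-style caller does

     rtx cond = compare_from_rtx (x, y, LT, 0, SImode, NULL_RTX, 0);

   and then emits a conditional branch keyed off COND, since the value
   normally returned below is just (LT (cc0) (const_int 0)) and the compare
   insn itself has already been emitted by emit_cmp_insn; do_store_flag
   below uses it this way via bcc_gen_fctn.  */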
9712
9713 rtx
9714 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9715 register rtx op0, op1;
9716 enum rtx_code code;
9717 int unsignedp;
9718 enum machine_mode mode;
9719 rtx size;
9720 int align;
9721 {
9722 rtx tem;
9723
9724 /* If one operand is constant, make it the second one. Only do this
9725 if the other operand is not constant as well. */
9726
9727 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9728 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9729 {
9730 tem = op0;
9731 op0 = op1;
9732 op1 = tem;
9733 code = swap_condition (code);
9734 }
9735
9736 if (flag_force_mem)
9737 {
9738 op0 = force_not_mem (op0);
9739 op1 = force_not_mem (op1);
9740 }
9741
9742 do_pending_stack_adjust ();
9743
9744 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9745 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9746 return tem;
9747
9748 #if 0
9749 /* There's no need to do this now that combine.c can eliminate lots of
9750 sign extensions. This can be less efficient in certain cases on other
9751 machines. */
9752
9753 /* If this is a signed equality comparison, we can do it as an
9754 unsigned comparison since zero-extension is cheaper than sign
9755 extension and comparisons with zero are done as unsigned. This is
9756 the case even on machines that can do fast sign extension, since
9757 zero-extension is easier to combine with other operations than
9758 sign-extension is. If we are comparing against a constant, we must
9759 convert it to what it would look like unsigned. */
9760 if ((code == EQ || code == NE) && ! unsignedp
9761 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9762 {
9763 if (GET_CODE (op1) == CONST_INT
9764 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9765 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9766 unsignedp = 1;
9767 }
9768 #endif
9769
9770 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9771
9772 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9773 }
9774
9775 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9776 The decision as to signed or unsigned comparison must be made by the caller.
9777
9778 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9779 compared.
9780
9781 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9782 size of MODE should be used. */
9783
9784 void
9785 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9786 if_false_label, if_true_label)
9787 register rtx op0, op1;
9788 enum rtx_code code;
9789 int unsignedp;
9790 enum machine_mode mode;
9791 rtx size;
9792 int align;
9793 rtx if_false_label, if_true_label;
9794 {
9795 rtx tem;
9796 int dummy_true_label = 0;
9797
9798 /* Reverse the comparison if that is safe and we want to jump if it is
9799 false. */
9800 if (! if_true_label && ! FLOAT_MODE_P (mode))
9801 {
9802 if_true_label = if_false_label;
9803 if_false_label = 0;
9804 code = reverse_condition (code);
9805 }
9806
9807 /* If one operand is constant, make it the second one. Only do this
9808 if the other operand is not constant as well. */
9809
9810 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9811 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9812 {
9813 tem = op0;
9814 op0 = op1;
9815 op1 = tem;
9816 code = swap_condition (code);
9817 }
9818
9819 if (flag_force_mem)
9820 {
9821 op0 = force_not_mem (op0);
9822 op1 = force_not_mem (op1);
9823 }
9824
9825 do_pending_stack_adjust ();
9826
9827 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9828 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9829 {
9830 if (tem == const_true_rtx)
9831 {
9832 if (if_true_label)
9833 emit_jump (if_true_label);
9834 }
9835 else
9836 {
9837 if (if_false_label)
9838 emit_jump (if_false_label);
9839 }
9840 return;
9841 }
9842
9843 #if 0
9844 /* There's no need to do this now that combine.c can eliminate lots of
9845 sign extensions. This can be less efficient in certain cases on other
9846 machines. */
9847
9848 /* If this is a signed equality comparison, we can do it as an
9849 unsigned comparison since zero-extension is cheaper than sign
9850 extension and comparisons with zero are done as unsigned. This is
9851 the case even on machines that can do fast sign extension, since
9852 zero-extension is easier to combine with other operations than
9853 sign-extension is. If we are comparing against a constant, we must
9854 convert it to what it would look like unsigned. */
9855 if ((code == EQ || code == NE) && ! unsignedp
9856 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9857 {
9858 if (GET_CODE (op1) == CONST_INT
9859 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9860 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9861 unsignedp = 1;
9862 }
9863 #endif
9864
9865 if (! if_true_label)
9866 {
9867 dummy_true_label = 1;
9868 if_true_label = gen_label_rtx ();
9869 }
9870
9871 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9872 if_true_label);
9873
9874 if (if_false_label)
9875 emit_jump (if_false_label);
9876 if (dummy_true_label)
9877 emit_label (if_true_label);
9878 }
9879
9880 /* Generate code for a comparison expression EXP (including code to compute
9881 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9882 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9883 generated code will drop through.
9884 SIGNED_CODE should be the rtx operation for this comparison for
9885 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9886
9887 We force a stack adjustment unless there are currently
9888 things pushed on the stack that aren't yet used. */
9889
9890 static void
9891 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9892 if_true_label)
9893 register tree exp;
9894 enum rtx_code signed_code, unsigned_code;
9895 rtx if_false_label, if_true_label;
9896 {
9897 int align0, align1;
9898 register rtx op0, op1;
9899 register tree type;
9900 register enum machine_mode mode;
9901 int unsignedp;
9902 enum rtx_code code;
9903
9904 /* Don't crash if the comparison was erroneous. */
9905 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
9906 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9907 return;
9908
9909 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
9910 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9911 mode = TYPE_MODE (type);
9912 unsignedp = TREE_UNSIGNED (type);
9913 code = unsignedp ? unsigned_code : signed_code;
9914
9915 #ifdef HAVE_canonicalize_funcptr_for_compare
9916 /* If function pointers need to be "canonicalized" before they can
9917 be reliably compared, then canonicalize them. */
9918 if (HAVE_canonicalize_funcptr_for_compare
9919 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9920 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9921 == FUNCTION_TYPE))
9922 {
9923 rtx new_op0 = gen_reg_rtx (mode);
9924
9925 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9926 op0 = new_op0;
9927 }
9928
9929 if (HAVE_canonicalize_funcptr_for_compare
9930 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9931 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9932 == FUNCTION_TYPE))
9933 {
9934 rtx new_op1 = gen_reg_rtx (mode);
9935
9936 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9937 op1 = new_op1;
9938 }
9939 #endif
9940
9941 /* Do any postincrements in the expression that was tested. */
9942 emit_queue ();
9943
9944 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9945 ((mode == BLKmode)
9946 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9947 MIN (align0, align1) / BITS_PER_UNIT,
9948 if_false_label, if_true_label);
9949 }
9950 \f
9951 /* Generate code to calculate EXP using a store-flag instruction
9952 and return an rtx for the result. EXP is either a comparison
9953 or a TRUTH_NOT_EXPR whose operand is a comparison.
9954
9955 If TARGET is nonzero, store the result there if convenient.
9956
9957 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9958 cheap.
9959
9960 Return zero if there is no suitable set-flag instruction
9961 available on this machine.
9962
9963 Once expand_expr has been called on the arguments of the comparison,
9964 we are committed to doing the store flag, since it is not safe to
9965 re-evaluate the expression. We emit the store-flag insn by calling
9966 emit_store_flag, but only expand the arguments if we have a reason
9967 to believe that emit_store_flag will be successful. If we think that
9968 it will, but it isn't, we have to simulate the store-flag with a
9969 set/jump/set sequence. */
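/* Illustrative example (added sketch): the single-bit case below turns

     int f (int x) { return (x & 8) != 0; }

   into a right shift of X by 3 followed by an AND with 1 (plus an XOR with
   1 when the test is == 0 instead), so no scc instruction is needed at
   all.  */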
9970
9971 static rtx
9972 do_store_flag (exp, target, mode, only_cheap)
9973 tree exp;
9974 rtx target;
9975 enum machine_mode mode;
9976 int only_cheap;
9977 {
9978 enum rtx_code code;
9979 tree arg0, arg1, type;
9980 tree tem;
9981 enum machine_mode operand_mode;
9982 int invert = 0;
9983 int unsignedp;
9984 rtx op0, op1;
9985 enum insn_code icode;
9986 rtx subtarget = target;
9987 rtx result, label;
9988
9989 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9990 result at the end. We can't simply invert the test since it would
9991 have already been inverted if it were valid. This case occurs for
9992 some floating-point comparisons. */
9993
9994 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9995 invert = 1, exp = TREE_OPERAND (exp, 0);
9996
9997 arg0 = TREE_OPERAND (exp, 0);
9998 arg1 = TREE_OPERAND (exp, 1);
9999 type = TREE_TYPE (arg0);
10000 operand_mode = TYPE_MODE (type);
10001 unsignedp = TREE_UNSIGNED (type);
10002
10003 /* We won't bother with BLKmode store-flag operations because it would mean
10004 passing a lot of information to emit_store_flag. */
10005 if (operand_mode == BLKmode)
10006 return 0;
10007
10008 /* We won't bother with store-flag operations involving function pointers
10009 when function pointers must be canonicalized before comparisons. */
10010 #ifdef HAVE_canonicalize_funcptr_for_compare
10011 if (HAVE_canonicalize_funcptr_for_compare
10012 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10013 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10014 == FUNCTION_TYPE))
10015 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10016 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10017 == FUNCTION_TYPE))))
10018 return 0;
10019 #endif
10020
10021 STRIP_NOPS (arg0);
10022 STRIP_NOPS (arg1);
10023
10024 /* Get the rtx comparison code to use. We know that EXP is a comparison
10025 operation of some type. Some comparisons against 1 and -1 can be
10026 converted to comparisons with zero. Do so here so that the tests
10027 below will be aware that we have a comparison with zero. These
10028 tests will not catch constants in the first operand, but constants
10029 are rarely passed as the first operand. */
10030
10031 switch (TREE_CODE (exp))
10032 {
10033 case EQ_EXPR:
10034 code = EQ;
10035 break;
10036 case NE_EXPR:
10037 code = NE;
10038 break;
10039 case LT_EXPR:
10040 if (integer_onep (arg1))
10041 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10042 else
10043 code = unsignedp ? LTU : LT;
10044 break;
10045 case LE_EXPR:
10046 if (! unsignedp && integer_all_onesp (arg1))
10047 arg1 = integer_zero_node, code = LT;
10048 else
10049 code = unsignedp ? LEU : LE;
10050 break;
10051 case GT_EXPR:
10052 if (! unsignedp && integer_all_onesp (arg1))
10053 arg1 = integer_zero_node, code = GE;
10054 else
10055 code = unsignedp ? GTU : GT;
10056 break;
10057 case GE_EXPR:
10058 if (integer_onep (arg1))
10059 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10060 else
10061 code = unsignedp ? GEU : GE;
10062 break;
10063 default:
10064 abort ();
10065 }
10066
10067 /* Put a constant second. */
10068 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10069 {
10070 tem = arg0; arg0 = arg1; arg1 = tem;
10071 code = swap_condition (code);
10072 }
10073
10074 /* If this is an equality or inequality test of a single bit, we can
10075 do this by shifting the bit being tested to the low-order bit and
10076 masking the result with the constant 1. If the condition was EQ,
10077 we xor it with 1. This does not require an scc insn and is faster
10078 than an scc insn even if we have it. */
10079
10080 if ((code == NE || code == EQ)
10081 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10082 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10083 {
10084 tree inner = TREE_OPERAND (arg0, 0);
10085 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10086 int ops_unsignedp;
10087
10088 /* If INNER is a right shift by a constant and it plus BITNUM does
10089 not overflow, adjust BITNUM and INNER. */
10090
10091 if (TREE_CODE (inner) == RSHIFT_EXPR
10092 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10093 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10094 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10095 < TYPE_PRECISION (type)))
10096 {
10097 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10098 inner = TREE_OPERAND (inner, 0);
10099 }
10100
10101 /* If we are going to be able to omit the AND below, we must do our
10102 operations as unsigned. If we must use the AND, we have a choice.
10103 Normally unsigned is faster, but for some machines signed is. */
10104 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10105 #ifdef LOAD_EXTEND_OP
10106 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10107 #else
10108 : 1
10109 #endif
10110 );
10111
10112 if (subtarget == 0 || GET_CODE (subtarget) != REG
10113 || GET_MODE (subtarget) != operand_mode
10114 || ! safe_from_p (subtarget, inner, 1))
10115 subtarget = 0;
10116
10117 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10118
10119 if (bitnum != 0)
10120 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10121 size_int (bitnum), subtarget, ops_unsignedp);
10122
10123 if (GET_MODE (op0) != mode)
10124 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10125
10126 if ((code == EQ && ! invert) || (code == NE && invert))
10127 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10128 ops_unsignedp, OPTAB_LIB_WIDEN);
10129
10130 /* Put the AND last so it can combine with more things. */
10131 if (bitnum != TYPE_PRECISION (type) - 1)
10132 op0 = expand_and (op0, const1_rtx, subtarget);
10133
10134 return op0;
10135 }
10136
10137 /* Now see if we are likely to be able to do this. Return if not. */
10138 if (! can_compare_p (operand_mode, ccp_store_flag))
10139 return 0;
10140 icode = setcc_gen_code[(int) code];
10141 if (icode == CODE_FOR_nothing
10142 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10143 {
10144 /* We can only do this if it is one of the special cases that
10145 can be handled without an scc insn. */
10146 if ((code == LT && integer_zerop (arg1))
10147 || (! only_cheap && code == GE && integer_zerop (arg1)))
10148 ;
10149 else if (BRANCH_COST >= 0
10150 && ! only_cheap && (code == NE || code == EQ)
10151 && TREE_CODE (type) != REAL_TYPE
10152 && ((abs_optab->handlers[(int) operand_mode].insn_code
10153 != CODE_FOR_nothing)
10154 || (ffs_optab->handlers[(int) operand_mode].insn_code
10155 != CODE_FOR_nothing)))
10156 ;
10157 else
10158 return 0;
10159 }
10160
10161 preexpand_calls (exp);
10162 if (subtarget == 0 || GET_CODE (subtarget) != REG
10163 || GET_MODE (subtarget) != operand_mode
10164 || ! safe_from_p (subtarget, arg1, 1))
10165 subtarget = 0;
10166
10167 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10168 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10169
10170 if (target == 0)
10171 target = gen_reg_rtx (mode);
10172
10173 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10174 because, if emit_store_flag does anything, it will succeed and
10175 OP0 and OP1 will not be used subsequently. */
10176
10177 result = emit_store_flag (target, code,
10178 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10179 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10180 operand_mode, unsignedp, 1);
10181
10182 if (result)
10183 {
10184 if (invert)
10185 result = expand_binop (mode, xor_optab, result, const1_rtx,
10186 result, 0, OPTAB_LIB_WIDEN);
10187 return result;
10188 }
10189
10190 /* If this failed, we have to do this with set/compare/jump/set code. */
10191 if (GET_CODE (target) != REG
10192 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10193 target = gen_reg_rtx (GET_MODE (target));
10194
10195 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10196 result = compare_from_rtx (op0, op1, code, unsignedp,
10197 operand_mode, NULL_RTX, 0);
10198 if (GET_CODE (result) == CONST_INT)
10199 return (((result == const0_rtx && ! invert)
10200 || (result != const0_rtx && invert))
10201 ? const0_rtx : const1_rtx);
10202
10203 label = gen_label_rtx ();
10204 if (bcc_gen_fctn[(int) code] == 0)
10205 abort ();
10206
10207 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10208 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10209 emit_label (label);
10210
10211 return target;
10212 }
10213 \f
10214 /* Generate a tablejump instruction (used for switch statements). */
10215
10216 #ifdef HAVE_tablejump
10217
10218 /* INDEX is the value being switched on, with the lowest value
10219 in the table already subtracted.
10220 MODE is its expected mode (needed if INDEX is constant).
10221 RANGE is the length of the jump table.
10222 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10223
10224 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10225 index value is out of range. */
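/* Illustrative example (added sketch): for a dense switch such as

     switch (i) { case 2: return 20; case 3: return 30; case 4: return 40; }

   the caller has already computed INDEX = i - 2, so the single unsigned
   comparison below sends both i < 2 and i > 4 to DEFAULT_LABEL at once,
   because subtracting the low bound makes out-of-range values wrap to
   large unsigned numbers.  */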
10226
10227 void
10228 do_tablejump (index, mode, range, table_label, default_label)
10229 rtx index, range, table_label, default_label;
10230 enum machine_mode mode;
10231 {
10232 register rtx temp, vector;
10233
10234 /* Do an unsigned comparison (in the proper mode) between the index
10235 expression and the value which represents the length of the range.
10236 Since we just finished subtracting the lower bound of the range
10237 from the index expression, this comparison allows us to simultaneously
10238 check that the original index expression value is both greater than
10239 or equal to the minimum value of the range and less than or equal to
10240 the maximum value of the range. */
10241
10242 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10243 0, default_label);
10244
10245 /* If index is in range, it must fit in Pmode.
10246 Convert to Pmode so we can index with it. */
10247 if (mode != Pmode)
10248 index = convert_to_mode (Pmode, index, 1);
10249
10250 /* Don't let a MEM slip through, because then INDEX that comes
10251 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10252 and break_out_memory_refs will go to work on it and mess it up. */
10253 #ifdef PIC_CASE_VECTOR_ADDRESS
10254 if (flag_pic && GET_CODE (index) != REG)
10255 index = copy_to_mode_reg (Pmode, index);
10256 #endif
10257
10258 /* If flag_force_addr were to affect this address
10259 it could interfere with the tricky assumptions made
10260 about addresses that contain label-refs,
10261 which may be valid only very near the tablejump itself. */
10262 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10263 GET_MODE_SIZE, because this indicates how large insns are. The other
10264 uses should all be Pmode, because they are addresses. This code
10265 could fail if addresses and insns are not the same size. */
10266 index = gen_rtx_PLUS (Pmode,
10267 gen_rtx_MULT (Pmode, index,
10268 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10269 gen_rtx_LABEL_REF (Pmode, table_label));
10270 #ifdef PIC_CASE_VECTOR_ADDRESS
10271 if (flag_pic)
10272 index = PIC_CASE_VECTOR_ADDRESS (index);
10273 else
10274 #endif
10275 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10276 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10277 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10278 RTX_UNCHANGING_P (vector) = 1;
10279 convert_move (temp, vector, 0);
10280
10281 emit_jump_insn (gen_tablejump (temp, table_label));
10282
10283 /* If we are generating PIC code or if the table is PC-relative, the
10284 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10285 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10286 emit_barrier ();
10287 }
10288
10289 #endif /* HAVE_tablejump */